expr.c revision 110611
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-config.h"
34#include "insn-attr.h"
35/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
36#include "expr.h"
37#include "optabs.h"
38#include "libfuncs.h"
39#include "recog.h"
40#include "reload.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "ggc.h"
45#include "langhooks.h"
46#include "intl.h"
47#include "tm_p.h"
48
49/* Decide whether a function's arguments should be processed
50   from first to last or from last to first.
51
52   They should if the stack and args grow in opposite directions, but
53   only if we have push insns.  */
54
55#ifdef PUSH_ROUNDING
56
57#ifndef PUSH_ARGS_REVERSED
58#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
60#endif
61#endif
62
63#endif
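/* For example, on a hypothetical target where the stack grows downward but
   argument offsets grow upward, exactly one of STACK_GROWS_DOWNWARD and
   ARGS_GROW_DOWNWARD is defined, so the test above defines
   PUSH_ARGS_REVERSED: pushing the arguments from last to first leaves them
   at increasing addresses in source order.  */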
64
65#ifndef STACK_PUSH_CODE
66#ifdef STACK_GROWS_DOWNWARD
67#define STACK_PUSH_CODE PRE_DEC
68#else
69#define STACK_PUSH_CODE PRE_INC
70#endif
71#endif
72
73/* Assume that case vectors are not pc-relative.  */
74#ifndef CASE_VECTOR_PC_RELATIVE
75#define CASE_VECTOR_PC_RELATIVE 0
76#endif
77
78/* If this is nonzero, we do not bother generating VOLATILE
79   around volatile memory references, and we are willing to
80   output indirect addresses.  If cse is to follow, we reject
81   indirect addresses so a useful potential cse is generated;
82   if it is used only once, instruction combination will produce
83   the same indirect address eventually.  */
84int cse_not_expected;
85
86/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
87static tree placeholder_list = 0;
88
89/* This structure is used by move_by_pieces to describe the move to
90   be performed.  */
91struct move_by_pieces
92{
93  rtx to;
94  rtx to_addr;
95  int autinc_to;
96  int explicit_inc_to;
97  rtx from;
98  rtx from_addr;
99  int autinc_from;
100  int explicit_inc_from;
101  unsigned HOST_WIDE_INT len;
102  HOST_WIDE_INT offset;
103  int reverse;
104};
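/* Sketch of how the fields above are used (as inferred from move_by_pieces
   below): TO and FROM are the BLKmode MEMs, TO_ADDR and FROM_ADDR their
   (possibly register-copied) addresses, the AUTINC_* flags note
   auto-increment addressing, the EXPLICIT_INC_* fields are -1/0/+1 when an
   explicit add must be emitted around each piece, LEN is the number of
   bytes still to move, OFFSET the current byte offset, and REVERSE is
   nonzero when copying from the end backward.  */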
105
106/* This structure is used by store_by_pieces to describe the store to be
107   performed.  */
108
109struct store_by_pieces
110{
111  rtx to;
112  rtx to_addr;
113  int autinc_to;
114  int explicit_inc_to;
115  unsigned HOST_WIDE_INT len;
116  HOST_WIDE_INT offset;
117  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
118  PTR constfundata;
119  int reverse;
120};
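/* As with struct move_by_pieces, except that instead of a source MEM the
   data to store comes from CONSTFUN: given CONSTFUNDATA, a byte offset and
   a mode, it returns the rtx constant to store for that piece (for
   clearing, the callback can simply return const0_rtx, which is what
   clear_by_pieces_1 below is for).  */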
121
122extern struct obstack permanent_obstack;
123
124static rtx enqueue_insn		PARAMS ((rtx, rtx));
125static unsigned HOST_WIDE_INT move_by_pieces_ninsns
126				PARAMS ((unsigned HOST_WIDE_INT,
127					 unsigned int));
128static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
129					 struct move_by_pieces *));
130static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
131					 enum machine_mode));
132static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
133					 unsigned int));
134static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
135					 unsigned int));
136static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
137					 enum machine_mode,
138					 struct store_by_pieces *));
139static rtx get_subtarget	PARAMS ((rtx));
140static int is_zeros_p		PARAMS ((tree));
141static int mostly_zeros_p	PARAMS ((tree));
142static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143					     HOST_WIDE_INT, enum machine_mode,
144					     tree, tree, int, int));
145static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
147					 HOST_WIDE_INT, enum machine_mode,
148					 tree, enum machine_mode, int, tree,
149					 int));
150static rtx var_rtx		PARAMS ((tree));
151static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153static int is_aligning_offset	PARAMS ((tree, tree));
154static rtx expand_increment	PARAMS ((tree, int, int));
155static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
158					 rtx, rtx));
159static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
160#ifdef PUSH_ROUNDING
161static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162#endif
163static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
164
165/* Record for each mode whether we can move a register directly to or
166   from an object of that mode in memory.  If we can't, we won't try
167   to use that mode directly when accessing a field of that mode.  */
168
169static char direct_load[NUM_MACHINE_MODES];
170static char direct_store[NUM_MACHINE_MODES];
171
172/* If a memory-to-memory move would take MOVE_RATIO or more simple
173   move-instruction sequences, we will do a movstr or libcall instead.  */
174
175#ifndef MOVE_RATIO
176#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
177#define MOVE_RATIO 2
178#else
179/* If we are optimizing for space (-Os), cut down the default move ratio.  */
180#define MOVE_RATIO (optimize_size ? 3 : 15)
181#endif
182#endif
183
184/* This macro is used to determine whether move_by_pieces should be called
185   to perform a structure copy.  */
186#ifndef MOVE_BY_PIECES_P
187#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
188  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
189#endif
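/* Worked example (illustrative figures only): with the default MOVE_RATIO
   of 15 (used when the target has no movstrM patterns and we are not
   optimizing for size), copying 16 bytes known to be word-aligned on a
   32-bit target takes four SImode moves, so MOVE_BY_PIECES_P is true and
   the copy is expanded inline; an unaligned 16-byte copy on a
   STRICT_ALIGNMENT target would take sixteen QImode moves and would fall
   back to a movstr pattern or a library call instead.  */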
190
191/* This array records the insn_code of insns to perform block moves.  */
192enum insn_code movstr_optab[NUM_MACHINE_MODES];
193
194/* This array records the insn_code of insns to perform block clears.  */
195enum insn_code clrstr_optab[NUM_MACHINE_MODES];
196
197/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
198
199#ifndef SLOW_UNALIGNED_ACCESS
200#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
201#endif
202
203/* This is run once per compilation to set up which modes can be used
204   directly in memory and to initialize the block move optab.  */
205
206void
207init_expr_once ()
208{
209  rtx insn, pat;
210  enum machine_mode mode;
211  int num_clobbers;
212  rtx mem, mem1;
213
214  start_sequence ();
215
216  /* Try indexing by frame ptr and try by stack ptr.
217     It is known that on the Convex the stack ptr isn't a valid index.
218     With luck, one or the other is valid on any machine.  */
219  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
220  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
221
222  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
223  pat = PATTERN (insn);
224
225  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
226       mode = (enum machine_mode) ((int) mode + 1))
227    {
228      int regno;
229      rtx reg;
230
231      direct_load[(int) mode] = direct_store[(int) mode] = 0;
232      PUT_MODE (mem, mode);
233      PUT_MODE (mem1, mode);
234
235      /* See if there is some register that can be used in this mode and
236	 directly loaded or stored from memory.  */
237
238      if (mode != VOIDmode && mode != BLKmode)
239	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
240	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
241	     regno++)
242	  {
243	    if (! HARD_REGNO_MODE_OK (regno, mode))
244	      continue;
245
246	    reg = gen_rtx_REG (mode, regno);
247
248	    SET_SRC (pat) = mem;
249	    SET_DEST (pat) = reg;
250	    if (recog (pat, insn, &num_clobbers) >= 0)
251	      direct_load[(int) mode] = 1;
252
253	    SET_SRC (pat) = mem1;
254	    SET_DEST (pat) = reg;
255	    if (recog (pat, insn, &num_clobbers) >= 0)
256	      direct_load[(int) mode] = 1;
257
258	    SET_SRC (pat) = reg;
259	    SET_DEST (pat) = mem;
260	    if (recog (pat, insn, &num_clobbers) >= 0)
261	      direct_store[(int) mode] = 1;
262
263	    SET_SRC (pat) = reg;
264	    SET_DEST (pat) = mem1;
265	    if (recog (pat, insn, &num_clobbers) >= 0)
266	      direct_store[(int) mode] = 1;
267	  }
268    }
269
270  end_sequence ();
271}
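/* The direct_load/direct_store flags computed above are consulted later;
   for instance, direct_load is checked by convert_move and convert_modes
   below before a MEM is referred to directly in a narrower mode rather
   than first being copied into a register.  */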
272
273/* This is run at the start of compiling a function.  */
274
275void
276init_expr ()
277{
278  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
279
280  pending_chain = 0;
281  pending_stack_adjust = 0;
282  stack_pointer_delta = 0;
283  inhibit_defer_pop = 0;
284  saveregs_value = 0;
285  apply_args_value = 0;
286  forced_labels = 0;
287}
288
289void
290mark_expr_status (p)
291     struct expr_status *p;
292{
293  if (p == NULL)
294    return;
295
296  ggc_mark_rtx (p->x_saveregs_value);
297  ggc_mark_rtx (p->x_apply_args_value);
298  ggc_mark_rtx (p->x_forced_labels);
299}
300
301void
302free_expr_status (f)
303     struct function *f;
304{
305  free (f->expr);
306  f->expr = NULL;
307}
308
309/* Small sanity check that the queue is empty at the end of a function.  */
310
311void
312finish_expr_for_function ()
313{
314  if (pending_chain)
315    abort ();
316}
317
318/* Manage the queue of increment instructions to be output
319   for POSTINCREMENT_EXPR expressions, etc.  */
320
321/* Queue up to increment (or change) VAR later.  BODY says how:
322   BODY should be the same thing you would pass to emit_insn
323   to increment right away.  It will go to emit_insn later on.
324
325   The value is a QUEUED expression to be used in place of VAR
326   where you want to guarantee the pre-incrementation value of VAR.  */
327
328static rtx
329enqueue_insn (var, body)
330     rtx var, body;
331{
332  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
333				  body, pending_chain);
334  return pending_chain;
335}
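/* For reference (derived from the accessor macros used below): in the
   QUEUED rtx built here, QUEUED_VAR is the variable to be incremented,
   QUEUED_INSN is the insn that eventually performs the increment (filled
   in by emit_queue), QUEUED_COPY is a pseudo holding the pre-increment
   value if one was needed, QUEUED_BODY is the rtl for the increment
   itself, and QUEUED_NEXT links to the rest of pending_chain.  */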
336
337/* Use protect_from_queue to convert a QUEUED expression
338   into something that you can put immediately into an instruction.
339   If the queued incrementation has not happened yet,
340   protect_from_queue returns the variable itself.
341   If the incrementation has happened, protect_from_queue returns a temp
342   that contains a copy of the old value of the variable.
343
344   Any time an rtx which might possibly be a QUEUED is to be put
345   into an instruction, it must be passed through protect_from_queue first.
346   QUEUED expressions are not meaningful in instructions.
347
348   Do not pass a value through protect_from_queue and then hold
349   on to it for a while before putting it in an instruction!
350   If the queue is flushed in between, incorrect code will result.  */
351
352rtx
353protect_from_queue (x, modify)
354     rtx x;
355     int modify;
356{
357  RTX_CODE code = GET_CODE (x);
358
359#if 0  /* A QUEUED can hang around after the queue is forced out.  */
360  /* Shortcut for most common case.  */
361  if (pending_chain == 0)
362    return x;
363#endif
364
365  if (code != QUEUED)
366    {
367      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
368	 use of autoincrement.  Make a copy of the contents of the memory
369	 location rather than a copy of the address, but not if the value is
370	 of mode BLKmode.  Don't modify X in place since it might be
371	 shared.  */
372      if (code == MEM && GET_MODE (x) != BLKmode
373	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
374	{
375	  rtx y = XEXP (x, 0);
376	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
377
378	  if (QUEUED_INSN (y))
379	    {
380	      rtx temp = gen_reg_rtx (GET_MODE (x));
381
382	      emit_insn_before (gen_move_insn (temp, new),
383				QUEUED_INSN (y));
384	      return temp;
385	    }
386
387	  /* Copy the address into a pseudo, so that the returned value
388	     remains correct across calls to emit_queue.  */
389	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
390	}
391
392      /* Otherwise, recursively protect the subexpressions of all
393	 the kinds of rtx's that can contain a QUEUED.  */
394      if (code == MEM)
395	{
396	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
397	  if (tem != XEXP (x, 0))
398	    {
399	      x = copy_rtx (x);
400	      XEXP (x, 0) = tem;
401	    }
402	}
403      else if (code == PLUS || code == MULT)
404	{
405	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
406	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
407	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
408	    {
409	      x = copy_rtx (x);
410	      XEXP (x, 0) = new0;
411	      XEXP (x, 1) = new1;
412	    }
413	}
414      return x;
415    }
416  /* If the increment has not happened, use the variable itself.  Copy it
417     into a new pseudo so that the value remains correct across calls to
418     emit_queue.  */
419  if (QUEUED_INSN (x) == 0)
420    return copy_to_reg (QUEUED_VAR (x));
421  /* If the increment has happened and a pre-increment copy exists,
422     use that copy.  */
423  if (QUEUED_COPY (x) != 0)
424    return QUEUED_COPY (x);
425  /* The increment has happened but we haven't set up a pre-increment copy.
426     Set one up now, and use it.  */
427  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
428  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
429		    QUEUED_INSN (x));
430  return QUEUED_COPY (x);
431}
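/* Typical use (as in convert_move below): the operand that will be written
   is protected with MODIFY nonzero, read-only operands with MODIFY zero,
   immediately before they are put into insns:

     to = protect_from_queue (to, 1);
     from = protect_from_queue (from, 0);  */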
432
433/* Return nonzero if X contains a QUEUED expression:
434   if it contains anything that will be altered by a queued increment.
435   We handle only combinations of MEM, PLUS, MINUS and MULT operators
436   since memory addresses generally contain only those.  */
437
438int
439queued_subexp_p (x)
440     rtx x;
441{
442  enum rtx_code code = GET_CODE (x);
443  switch (code)
444    {
445    case QUEUED:
446      return 1;
447    case MEM:
448      return queued_subexp_p (XEXP (x, 0));
449    case MULT:
450    case PLUS:
451    case MINUS:
452      return (queued_subexp_p (XEXP (x, 0))
453	      || queued_subexp_p (XEXP (x, 1)));
454    default:
455      return 0;
456    }
457}
458
459/* Perform all the pending incrementations.  */
460
461void
462emit_queue ()
463{
464  rtx p;
465  while ((p = pending_chain))
466    {
467      rtx body = QUEUED_BODY (p);
468
469      if (GET_CODE (body) == SEQUENCE)
470	{
471	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
472	  emit_insn (QUEUED_BODY (p));
473	}
474      else
475	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
476      pending_chain = QUEUED_NEXT (p);
477    }
478}
479
480/* Copy data from FROM to TO, where the machine modes are not the same.
481   Both modes may be integer, or both may be floating.
482   UNSIGNEDP should be nonzero if FROM is an unsigned type.
483   This causes zero-extension instead of sign-extension.  */
484
485void
486convert_move (to, from, unsignedp)
487     rtx to, from;
488     int unsignedp;
489{
490  enum machine_mode to_mode = GET_MODE (to);
491  enum machine_mode from_mode = GET_MODE (from);
492  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
493  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
494  enum insn_code code;
495  rtx libcall;
496
497  /* rtx code for making an equivalent value.  */
498  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
499
500  to = protect_from_queue (to, 1);
501  from = protect_from_queue (from, 0);
502
503  if (to_real != from_real)
504    abort ();
505
506  /* If FROM is a SUBREG that indicates that we have already done at least
507     the required extension, strip it.  We don't handle such SUBREGs as
508     TO here.  */
509
510  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
511      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
512	  >= GET_MODE_SIZE (to_mode))
513      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
514    from = gen_lowpart (to_mode, from), from_mode = to_mode;
515
516  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
517    abort ();
518
519  if (to_mode == from_mode
520      || (from_mode == VOIDmode && CONSTANT_P (from)))
521    {
522      emit_move_insn (to, from);
523      return;
524    }
525
526  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
527    {
528      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
529	abort ();
530
531      if (VECTOR_MODE_P (to_mode))
532	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
533      else
534	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
535
536      emit_move_insn (to, from);
537      return;
538    }
539
540  if (to_real != from_real)
541    abort ();
542
543  if (to_real)
544    {
545      rtx value, insns;
546
547      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
548	{
549	  /* Try converting directly if the insn is supported.  */
550	  if ((code = can_extend_p (to_mode, from_mode, 0))
551	      != CODE_FOR_nothing)
552	    {
553	      emit_unop_insn (code, to, from, UNKNOWN);
554	      return;
555	    }
556	}
557
558#ifdef HAVE_trunchfqf2
559      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
560	{
561	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
562	  return;
563	}
564#endif
565#ifdef HAVE_trunctqfqf2
566      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
567	{
568	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
569	  return;
570	}
571#endif
572#ifdef HAVE_truncsfqf2
573      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
574	{
575	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
576	  return;
577	}
578#endif
579#ifdef HAVE_truncdfqf2
580      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
581	{
582	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
583	  return;
584	}
585#endif
586#ifdef HAVE_truncxfqf2
587      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
588	{
589	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
590	  return;
591	}
592#endif
593#ifdef HAVE_trunctfqf2
594      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
595	{
596	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
597	  return;
598	}
599#endif
600
601#ifdef HAVE_trunctqfhf2
602      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
603	{
604	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
605	  return;
606	}
607#endif
608#ifdef HAVE_truncsfhf2
609      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
610	{
611	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
612	  return;
613	}
614#endif
615#ifdef HAVE_truncdfhf2
616      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
617	{
618	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
619	  return;
620	}
621#endif
622#ifdef HAVE_truncxfhf2
623      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
624	{
625	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
626	  return;
627	}
628#endif
629#ifdef HAVE_trunctfhf2
630      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
631	{
632	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
633	  return;
634	}
635#endif
636
637#ifdef HAVE_truncsftqf2
638      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
639	{
640	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
641	  return;
642	}
643#endif
644#ifdef HAVE_truncdftqf2
645      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
646	{
647	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
648	  return;
649	}
650#endif
651#ifdef HAVE_truncxftqf2
652      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
653	{
654	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
655	  return;
656	}
657#endif
658#ifdef HAVE_trunctftqf2
659      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
660	{
661	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
662	  return;
663	}
664#endif
665
666#ifdef HAVE_truncdfsf2
667      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
668	{
669	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
670	  return;
671	}
672#endif
673#ifdef HAVE_truncxfsf2
674      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
675	{
676	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
677	  return;
678	}
679#endif
680#ifdef HAVE_trunctfsf2
681      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
682	{
683	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
684	  return;
685	}
686#endif
687#ifdef HAVE_truncxfdf2
688      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
689	{
690	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
691	  return;
692	}
693#endif
694#ifdef HAVE_trunctfdf2
695      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
696	{
697	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
698	  return;
699	}
700#endif
701
702      libcall = (rtx) 0;
703      switch (from_mode)
704	{
705	case SFmode:
706	  switch (to_mode)
707	    {
708	    case DFmode:
709	      libcall = extendsfdf2_libfunc;
710	      break;
711
712	    case XFmode:
713	      libcall = extendsfxf2_libfunc;
714	      break;
715
716	    case TFmode:
717	      libcall = extendsftf2_libfunc;
718	      break;
719
720	    default:
721	      break;
722	    }
723	  break;
724
725	case DFmode:
726	  switch (to_mode)
727	    {
728	    case SFmode:
729	      libcall = truncdfsf2_libfunc;
730	      break;
731
732	    case XFmode:
733	      libcall = extenddfxf2_libfunc;
734	      break;
735
736	    case TFmode:
737	      libcall = extenddftf2_libfunc;
738	      break;
739
740	    default:
741	      break;
742	    }
743	  break;
744
745	case XFmode:
746	  switch (to_mode)
747	    {
748	    case SFmode:
749	      libcall = truncxfsf2_libfunc;
750	      break;
751
752	    case DFmode:
753	      libcall = truncxfdf2_libfunc;
754	      break;
755
756	    default:
757	      break;
758	    }
759	  break;
760
761	case TFmode:
762	  switch (to_mode)
763	    {
764	    case SFmode:
765	      libcall = trunctfsf2_libfunc;
766	      break;
767
768	    case DFmode:
769	      libcall = trunctfdf2_libfunc;
770	      break;
771
772	    default:
773	      break;
774	    }
775	  break;
776
777	default:
778	  break;
779	}
780
781      if (libcall == (rtx) 0)
782	/* This conversion is not implemented yet.  */
783	abort ();
784
785      start_sequence ();
786      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
787				       1, from, from_mode);
788      insns = get_insns ();
789      end_sequence ();
790      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
791								    from));
792      return;
793    }
794
795  /* Now both modes are integers.  */
796
797  /* Handle expanding beyond a word.  */
798  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
799      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
800    {
801      rtx insns;
802      rtx lowpart;
803      rtx fill_value;
804      rtx lowfrom;
805      int i;
806      enum machine_mode lowpart_mode;
807      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
808
809      /* Try converting directly if the insn is supported.  */
810      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
811	  != CODE_FOR_nothing)
812	{
813	  /* If FROM is a SUBREG, put it into a register.  Do this
814	     so that we always generate the same set of insns for
815	     better cse'ing; if an intermediate assignment occurred,
816	     we won't be doing the operation directly on the SUBREG.  */
817	  if (optimize > 0 && GET_CODE (from) == SUBREG)
818	    from = force_reg (from_mode, from);
819	  emit_unop_insn (code, to, from, equiv_code);
820	  return;
821	}
822      /* Next, try converting via full word.  */
823      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
824	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
825		   != CODE_FOR_nothing))
826	{
827	  if (GET_CODE (to) == REG)
828	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
829	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
830	  emit_unop_insn (code, to,
831			  gen_lowpart (word_mode, to), equiv_code);
832	  return;
833	}
834
835      /* No special multiword conversion insn; do it by hand.  */
836      start_sequence ();
837
838      /* Since we will turn this into a no conflict block, we must ensure
839	 that the source does not overlap the target.  */
840
841      if (reg_overlap_mentioned_p (to, from))
842	from = force_reg (from_mode, from);
843
844      /* Get a copy of FROM widened to a word, if necessary.  */
845      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
846	lowpart_mode = word_mode;
847      else
848	lowpart_mode = from_mode;
849
850      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
851
852      lowpart = gen_lowpart (lowpart_mode, to);
853      emit_move_insn (lowpart, lowfrom);
854
855      /* Compute the value to put in each remaining word.  */
856      if (unsignedp)
857	fill_value = const0_rtx;
858      else
859	{
860#ifdef HAVE_slt
861	  if (HAVE_slt
862	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
863	      && STORE_FLAG_VALUE == -1)
864	    {
865	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
866			     lowpart_mode, 0);
867	      fill_value = gen_reg_rtx (word_mode);
868	      emit_insn (gen_slt (fill_value));
869	    }
870	  else
871#endif
872	    {
873	      fill_value
874		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
875				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
876				NULL_RTX, 0);
877	      fill_value = convert_to_mode (word_mode, fill_value, 1);
878	    }
879	}
880
881      /* Fill the remaining words.  */
882      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
883	{
884	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
885	  rtx subword = operand_subword (to, index, 1, to_mode);
886
887	  if (subword == 0)
888	    abort ();
889
890	  if (fill_value != subword)
891	    emit_move_insn (subword, fill_value);
892	}
893
894      insns = get_insns ();
895      end_sequence ();
896
897      emit_no_conflict_block (insns, to, from, NULL_RTX,
898			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
899      return;
900    }
901
902  /* Truncating multi-word to a word or less.  */
903  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
904      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
905    {
906      if (!((GET_CODE (from) == MEM
907	     && ! MEM_VOLATILE_P (from)
908	     && direct_load[(int) to_mode]
909	     && ! mode_dependent_address_p (XEXP (from, 0)))
910	    || GET_CODE (from) == REG
911	    || GET_CODE (from) == SUBREG))
912	from = force_reg (from_mode, from);
913      convert_move (to, gen_lowpart (word_mode, from), 0);
914      return;
915    }
916
917  /* Handle pointer conversion.  */			/* SPEE 900220.  */
918  if (to_mode == PQImode)
919    {
920      if (from_mode != QImode)
921	from = convert_to_mode (QImode, from, unsignedp);
922
923#ifdef HAVE_truncqipqi2
924      if (HAVE_truncqipqi2)
925	{
926	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
927	  return;
928	}
929#endif /* HAVE_truncqipqi2 */
930      abort ();
931    }
932
933  if (from_mode == PQImode)
934    {
935      if (to_mode != QImode)
936	{
937	  from = convert_to_mode (QImode, from, unsignedp);
938	  from_mode = QImode;
939	}
940      else
941	{
942#ifdef HAVE_extendpqiqi2
943	  if (HAVE_extendpqiqi2)
944	    {
945	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
946	      return;
947	    }
948#endif /* HAVE_extendpqiqi2 */
949	  abort ();
950	}
951    }
952
953  if (to_mode == PSImode)
954    {
955      if (from_mode != SImode)
956	from = convert_to_mode (SImode, from, unsignedp);
957
958#ifdef HAVE_truncsipsi2
959      if (HAVE_truncsipsi2)
960	{
961	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
962	  return;
963	}
964#endif /* HAVE_truncsipsi2 */
965      abort ();
966    }
967
968  if (from_mode == PSImode)
969    {
970      if (to_mode != SImode)
971	{
972	  from = convert_to_mode (SImode, from, unsignedp);
973	  from_mode = SImode;
974	}
975      else
976	{
977#ifdef HAVE_extendpsisi2
978	  if (! unsignedp && HAVE_extendpsisi2)
979	    {
980	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
981	      return;
982	    }
983#endif /* HAVE_extendpsisi2 */
984#ifdef HAVE_zero_extendpsisi2
985	  if (unsignedp && HAVE_zero_extendpsisi2)
986	    {
987	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
988	      return;
989	    }
990#endif /* HAVE_zero_extendpsisi2 */
991	  abort ();
992	}
993    }
994
995  if (to_mode == PDImode)
996    {
997      if (from_mode != DImode)
998	from = convert_to_mode (DImode, from, unsignedp);
999
1000#ifdef HAVE_truncdipdi2
1001      if (HAVE_truncdipdi2)
1002	{
1003	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004	  return;
1005	}
1006#endif /* HAVE_truncdipdi2 */
1007      abort ();
1008    }
1009
1010  if (from_mode == PDImode)
1011    {
1012      if (to_mode != DImode)
1013	{
1014	  from = convert_to_mode (DImode, from, unsignedp);
1015	  from_mode = DImode;
1016	}
1017      else
1018	{
1019#ifdef HAVE_extendpdidi2
1020	  if (HAVE_extendpdidi2)
1021	    {
1022	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023	      return;
1024	    }
1025#endif /* HAVE_extendpdidi2 */
1026	  abort ();
1027	}
1028    }
1029
1030  /* Now follow all the conversions between integers
1031     no more than a word long.  */
1032
1033  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1034  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036				GET_MODE_BITSIZE (from_mode)))
1037    {
1038      if (!((GET_CODE (from) == MEM
1039	     && ! MEM_VOLATILE_P (from)
1040	     && direct_load[(int) to_mode]
1041	     && ! mode_dependent_address_p (XEXP (from, 0)))
1042	    || GET_CODE (from) == REG
1043	    || GET_CODE (from) == SUBREG))
1044	from = force_reg (from_mode, from);
1045      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047	from = copy_to_reg (from);
1048      emit_move_insn (to, gen_lowpart (to_mode, from));
1049      return;
1050    }
1051
1052  /* Handle extension.  */
1053  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054    {
1055      /* Convert directly if that works.  */
1056      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057	  != CODE_FOR_nothing)
1058	{
1059	  if (flag_force_mem)
1060	    from = force_not_mem (from);
1061
1062	  emit_unop_insn (code, to, from, equiv_code);
1063	  return;
1064	}
1065      else
1066	{
1067	  enum machine_mode intermediate;
1068	  rtx tmp;
1069	  tree shift_amount;
1070
1071	  /* Search for a mode to convert via.  */
1072	  for (intermediate = from_mode; intermediate != VOIDmode;
1073	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1074	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1075		  != CODE_FOR_nothing)
1076		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1077		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1078					       GET_MODE_BITSIZE (intermediate))))
1079		&& (can_extend_p (intermediate, from_mode, unsignedp)
1080		    != CODE_FOR_nothing))
1081	      {
1082		convert_move (to, convert_to_mode (intermediate, from,
1083						   unsignedp), unsignedp);
1084		return;
1085	      }
1086
1087	  /* No suitable intermediate mode.
1088	     Generate what we need with	shifts.  */
1089	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1090				      - GET_MODE_BITSIZE (from_mode), 0);
1091	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1092	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1093			      to, unsignedp);
1094	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1095			      to, unsignedp);
1096	  if (tmp != to)
1097	    emit_move_insn (to, tmp);
1098	  return;
1099	}
1100    }
1101
1102  /* Support special truncate insns for certain modes.  */
1103
1104  if (from_mode == DImode && to_mode == SImode)
1105    {
1106#ifdef HAVE_truncdisi2
1107      if (HAVE_truncdisi2)
1108	{
1109	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1110	  return;
1111	}
1112#endif
1113      convert_move (to, force_reg (from_mode, from), unsignedp);
1114      return;
1115    }
1116
1117  if (from_mode == DImode && to_mode == HImode)
1118    {
1119#ifdef HAVE_truncdihi2
1120      if (HAVE_truncdihi2)
1121	{
1122	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1123	  return;
1124	}
1125#endif
1126      convert_move (to, force_reg (from_mode, from), unsignedp);
1127      return;
1128    }
1129
1130  if (from_mode == DImode && to_mode == QImode)
1131    {
1132#ifdef HAVE_truncdiqi2
1133      if (HAVE_truncdiqi2)
1134	{
1135	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1136	  return;
1137	}
1138#endif
1139      convert_move (to, force_reg (from_mode, from), unsignedp);
1140      return;
1141    }
1142
1143  if (from_mode == SImode && to_mode == HImode)
1144    {
1145#ifdef HAVE_truncsihi2
1146      if (HAVE_truncsihi2)
1147	{
1148	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1149	  return;
1150	}
1151#endif
1152      convert_move (to, force_reg (from_mode, from), unsignedp);
1153      return;
1154    }
1155
1156  if (from_mode == SImode && to_mode == QImode)
1157    {
1158#ifdef HAVE_truncsiqi2
1159      if (HAVE_truncsiqi2)
1160	{
1161	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1162	  return;
1163	}
1164#endif
1165      convert_move (to, force_reg (from_mode, from), unsignedp);
1166      return;
1167    }
1168
1169  if (from_mode == HImode && to_mode == QImode)
1170    {
1171#ifdef HAVE_trunchiqi2
1172      if (HAVE_trunchiqi2)
1173	{
1174	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1175	  return;
1176	}
1177#endif
1178      convert_move (to, force_reg (from_mode, from), unsignedp);
1179      return;
1180    }
1181
1182  if (from_mode == TImode && to_mode == DImode)
1183    {
1184#ifdef HAVE_trunctidi2
1185      if (HAVE_trunctidi2)
1186	{
1187	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1188	  return;
1189	}
1190#endif
1191      convert_move (to, force_reg (from_mode, from), unsignedp);
1192      return;
1193    }
1194
1195  if (from_mode == TImode && to_mode == SImode)
1196    {
1197#ifdef HAVE_trunctisi2
1198      if (HAVE_trunctisi2)
1199	{
1200	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1201	  return;
1202	}
1203#endif
1204      convert_move (to, force_reg (from_mode, from), unsignedp);
1205      return;
1206    }
1207
1208  if (from_mode == TImode && to_mode == HImode)
1209    {
1210#ifdef HAVE_trunctihi2
1211      if (HAVE_trunctihi2)
1212	{
1213	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1214	  return;
1215	}
1216#endif
1217      convert_move (to, force_reg (from_mode, from), unsignedp);
1218      return;
1219    }
1220
1221  if (from_mode == TImode && to_mode == QImode)
1222    {
1223#ifdef HAVE_trunctiqi2
1224      if (HAVE_trunctiqi2)
1225	{
1226	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1227	  return;
1228	}
1229#endif
1230      convert_move (to, force_reg (from_mode, from), unsignedp);
1231      return;
1232    }
1233
1234  /* Handle truncation of volatile memrefs, and so on;
1235     the things that couldn't be truncated directly,
1236     and for which there was no special instruction.  */
1237  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1238    {
1239      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1240      emit_move_insn (to, temp);
1241      return;
1242    }
1243
1244  /* Mode combination is not recognized.  */
1245  abort ();
1246}
1247
1248/* Return an rtx for a value that would result
1249   from converting X to mode MODE.
1250   Both X and MODE may be floating, or both integer.
1251   UNSIGNEDP is nonzero if X is an unsigned value.
1252   This can be done by referring to a part of X in place
1253   or by copying to a new temporary with conversion.
1254
1255   This function *must not* call protect_from_queue
1256   except when putting X into an insn (in which case convert_move does it).  */
1257
1258rtx
1259convert_to_mode (mode, x, unsignedp)
1260     enum machine_mode mode;
1261     rtx x;
1262     int unsignedp;
1263{
1264  return convert_modes (mode, VOIDmode, x, unsignedp);
1265}
1266
1267/* Return an rtx for a value that would result
1268   from converting X from mode OLDMODE to mode MODE.
1269   Both modes may be floating, or both integer.
1270   UNSIGNEDP is nonzero if X is an unsigned value.
1271
1272   This can be done by referring to a part of X in place
1273   or by copying to a new temporary with conversion.
1274
1275   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1276
1277   This function *must not* call protect_from_queue
1278   except when putting X into an insn (in which case convert_move does it).  */
1279
1280rtx
1281convert_modes (mode, oldmode, x, unsignedp)
1282     enum machine_mode mode, oldmode;
1283     rtx x;
1284     int unsignedp;
1285{
1286  rtx temp;
1287
1288  /* If FROM is a SUBREG that indicates that we have already done at least
1289     the required extension, strip it.  */
1290
1291  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1292      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1293      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1294    x = gen_lowpart (mode, x);
1295
1296  if (GET_MODE (x) != VOIDmode)
1297    oldmode = GET_MODE (x);
1298
1299  if (mode == oldmode)
1300    return x;
1301
1302  /* There is one case that we must handle specially: If we are converting
1303     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1304     we are to interpret the constant as unsigned, gen_lowpart will do
1305     the wrong thing if the constant appears negative.  What we want to do is
1306     make the high-order word of the constant zero, not all ones.  */
1307
1308  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1309      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1310      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1311    {
1312      HOST_WIDE_INT val = INTVAL (x);
1313
1314      if (oldmode != VOIDmode
1315	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1316	{
1317	  int width = GET_MODE_BITSIZE (oldmode);
1318
1319	  /* We need to zero extend VAL.  */
1320	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1321	}
1322
1323      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1324    }
1325
1326  /* We can do this with a gen_lowpart if both desired and current modes
1327     are integer, and this is either a constant integer, a register, or a
1328     non-volatile MEM.  Except for the constant case where MODE is no
1329     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1330
1331  if ((GET_CODE (x) == CONST_INT
1332       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1333      || (GET_MODE_CLASS (mode) == MODE_INT
1334	  && GET_MODE_CLASS (oldmode) == MODE_INT
1335	  && (GET_CODE (x) == CONST_DOUBLE
1336	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1337		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1338		       && direct_load[(int) mode])
1339		      || (GET_CODE (x) == REG
1340			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1341						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1342    {
1343      /* ?? If we don't know OLDMODE, we have to assume here that
1344	 X does not need sign- or zero-extension.   This may not be
1345	 the case, but it's the best we can do.  */
1346      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1347	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1348	{
1349	  HOST_WIDE_INT val = INTVAL (x);
1350	  int width = GET_MODE_BITSIZE (oldmode);
1351
1352	  /* We must sign or zero-extend in this case.  Start by
1353	     zero-extending, then sign extend if we need to.  */
1354	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1355	  if (! unsignedp
1356	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1357	    val |= (HOST_WIDE_INT) (-1) << width;
1358
1359	  return GEN_INT (trunc_int_for_mode (val, mode));
1360	}
1361
1362      return gen_lowpart (mode, x);
1363    }
1364
1365  temp = gen_reg_rtx (mode);
1366  convert_move (temp, x, unsignedp);
1367  return temp;
1368}
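/* Illustrative calls (hypothetical operands): to obtain an SImode,
   zero-extended copy of a QImode value NARROW one can write

     rtx wide = convert_to_mode (SImode, narrow, 1);

   convert_modes is the general form for when X may be a VOIDmode constant
   whose original mode must be supplied by hand:

     rtx wide = convert_modes (SImode, QImode, GEN_INT (200), 1);  */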
1369
1370/* This macro determines the largest unit size that move_by_pieces
1371   can use.  */
1372
1373/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1374   move efficiently, as opposed to MOVE_MAX, which is the maximum
1375   number of bytes we can move with a single instruction.  */
1376
1377#ifndef MOVE_MAX_PIECES
1378#define MOVE_MAX_PIECES   MOVE_MAX
1379#endif
1380
1381/* Generate several move instructions to copy LEN bytes from block FROM to
1382   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
1383   and TO through protect_from_queue before calling.
1384
1385   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1386   used to push FROM to the stack.
1387
1388   ALIGN is maximum alignment we can assume.  */
1389
1390void
1391move_by_pieces (to, from, len, align)
1392     rtx to, from;
1393     unsigned HOST_WIDE_INT len;
1394     unsigned int align;
1395{
1396  struct move_by_pieces data;
1397  rtx to_addr, from_addr = XEXP (from, 0);
1398  unsigned int max_size = MOVE_MAX_PIECES + 1;
1399  enum machine_mode mode = VOIDmode, tmode;
1400  enum insn_code icode;
1401
1402  data.offset = 0;
1403  data.from_addr = from_addr;
1404  if (to)
1405    {
1406      to_addr = XEXP (to, 0);
1407      data.to = to;
1408      data.autinc_to
1409	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1410	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1411      data.reverse
1412	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1413    }
1414  else
1415    {
1416      to_addr = NULL_RTX;
1417      data.to = NULL_RTX;
1418      data.autinc_to = 1;
1419#ifdef STACK_GROWS_DOWNWARD
1420      data.reverse = 1;
1421#else
1422      data.reverse = 0;
1423#endif
1424    }
1425  data.to_addr = to_addr;
1426  data.from = from;
1427  data.autinc_from
1428    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1429       || GET_CODE (from_addr) == POST_INC
1430       || GET_CODE (from_addr) == POST_DEC);
1431
1432  data.explicit_inc_from = 0;
1433  data.explicit_inc_to = 0;
1434  if (data.reverse) data.offset = len;
1435  data.len = len;
1436
1437  /* If copying requires more than two move insns,
1438     copy addresses to registers (to make displacements shorter)
1439     and use post-increment if available.  */
1440  if (!(data.autinc_from && data.autinc_to)
1441      && move_by_pieces_ninsns (len, align) > 2)
1442    {
1443      /* Find the mode of the largest move...  */
1444      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1445	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1446	if (GET_MODE_SIZE (tmode) < max_size)
1447	  mode = tmode;
1448
1449      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1450	{
1451	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1452	  data.autinc_from = 1;
1453	  data.explicit_inc_from = -1;
1454	}
1455      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1456	{
1457	  data.from_addr = copy_addr_to_reg (from_addr);
1458	  data.autinc_from = 1;
1459	  data.explicit_inc_from = 1;
1460	}
1461      if (!data.autinc_from && CONSTANT_P (from_addr))
1462	data.from_addr = copy_addr_to_reg (from_addr);
1463      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1464	{
1465	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1466	  data.autinc_to = 1;
1467	  data.explicit_inc_to = -1;
1468	}
1469      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1470	{
1471	  data.to_addr = copy_addr_to_reg (to_addr);
1472	  data.autinc_to = 1;
1473	  data.explicit_inc_to = 1;
1474	}
1475      if (!data.autinc_to && CONSTANT_P (to_addr))
1476	data.to_addr = copy_addr_to_reg (to_addr);
1477    }
1478
1479  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1480      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1481    align = MOVE_MAX * BITS_PER_UNIT;
1482
1483  /* First move what we can in the largest integer mode, then go to
1484     successively smaller modes.  */
1485
1486  while (max_size > 1)
1487    {
1488      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1489	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1490	if (GET_MODE_SIZE (tmode) < max_size)
1491	  mode = tmode;
1492
1493      if (mode == VOIDmode)
1494	break;
1495
1496      icode = mov_optab->handlers[(int) mode].insn_code;
1497      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1498	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1499
1500      max_size = GET_MODE_SIZE (mode);
1501    }
1502
1503  /* The code above should have handled everything.  */
1504  if (data.len > 0)
1505    abort ();
1506}
1507
1508/* Return number of insns required to move L bytes by pieces.
1509   ALIGN (in bits) is maximum alignment we can assume.  */
1510
1511static unsigned HOST_WIDE_INT
1512move_by_pieces_ninsns (l, align)
1513     unsigned HOST_WIDE_INT l;
1514     unsigned int align;
1515{
1516  unsigned HOST_WIDE_INT n_insns = 0;
1517  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1518
1519  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1520      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1521    align = MOVE_MAX * BITS_PER_UNIT;
1522
1523  while (max_size > 1)
1524    {
1525      enum machine_mode mode = VOIDmode, tmode;
1526      enum insn_code icode;
1527
1528      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1529	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1530	if (GET_MODE_SIZE (tmode) < max_size)
1531	  mode = tmode;
1532
1533      if (mode == VOIDmode)
1534	break;
1535
1536      icode = mov_optab->handlers[(int) mode].insn_code;
1537      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1538	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1539
1540      max_size = GET_MODE_SIZE (mode);
1541    }
1542
1543  if (l)
1544    abort ();
1545  return n_insns;
1546}
1547
1548/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1549   with move instructions for mode MODE.  GENFUN is the gen_... function
1550   to make a move insn for that mode.  DATA has all the other info.  */
1551
1552static void
1553move_by_pieces_1 (genfun, mode, data)
1554     rtx (*genfun) PARAMS ((rtx, ...));
1555     enum machine_mode mode;
1556     struct move_by_pieces *data;
1557{
1558  unsigned int size = GET_MODE_SIZE (mode);
1559  rtx to1 = NULL_RTX, from1;
1560
1561  while (data->len >= size)
1562    {
1563      if (data->reverse)
1564	data->offset -= size;
1565
1566      if (data->to)
1567	{
1568	  if (data->autinc_to)
1569	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1570					     data->offset);
1571	  else
1572	    to1 = adjust_address (data->to, mode, data->offset);
1573	}
1574
1575      if (data->autinc_from)
1576	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1577					   data->offset);
1578      else
1579	from1 = adjust_address (data->from, mode, data->offset);
1580
1581      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1582	emit_insn (gen_add2_insn (data->to_addr,
1583				  GEN_INT (-(HOST_WIDE_INT)size)));
1584      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1585	emit_insn (gen_add2_insn (data->from_addr,
1586				  GEN_INT (-(HOST_WIDE_INT)size)));
1587
1588      if (data->to)
1589	emit_insn ((*genfun) (to1, from1));
1590      else
1591	{
1592#ifdef PUSH_ROUNDING
1593	  emit_single_push_insn (mode, from1, NULL);
1594#else
1595	  abort ();
1596#endif
1597	}
1598
1599      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1600	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1601      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1602	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1603
1604      if (! data->reverse)
1605	data->offset += size;
1606
1607      data->len -= size;
1608    }
1609}
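/* Note how the explicit_inc_* bookkeeping set up in move_by_pieces is
   consumed here: when the value is negative the address register is
   decremented by the piece size before each access (mirroring PRE_DEC),
   and when it is positive the register is incremented afterwards
   (mirroring POST_INC).  */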
1610
1611/* Emit code to move a block Y to a block X.
1612   This may be done with string-move instructions,
1613   with multiple scalar move instructions, or with a library call.
1614
1615   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1616   with mode BLKmode.
1617   SIZE is an rtx that says how long they are.
1618   ALIGN is the maximum alignment we can assume they have.
1619
1620   Return the address of the new block, if memcpy is called and returns it,
1621   0 otherwise.  */
1622
1623rtx
1624emit_block_move (x, y, size)
1625     rtx x, y;
1626     rtx size;
1627{
1628  rtx retval = 0;
1629#ifdef TARGET_MEM_FUNCTIONS
1630  static tree fn;
1631  tree call_expr, arg_list;
1632#endif
1633  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1634
1635  if (GET_MODE (x) != BLKmode)
1636    abort ();
1637
1638  if (GET_MODE (y) != BLKmode)
1639    abort ();
1640
1641  x = protect_from_queue (x, 1);
1642  y = protect_from_queue (y, 0);
1643  size = protect_from_queue (size, 0);
1644
1645  if (GET_CODE (x) != MEM)
1646    abort ();
1647  if (GET_CODE (y) != MEM)
1648    abort ();
1649  if (size == 0)
1650    abort ();
1651
1652  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1653    move_by_pieces (x, y, INTVAL (size), align);
1654  else
1655    {
1656      /* Try the most limited insn first, because there's no point
1657	 including more than one in the machine description unless
1658	 the more limited one has some advantage.  */
1659
1660      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1661      enum machine_mode mode;
1662
1663      /* Since this is a move insn, we don't care about volatility.  */
1664      volatile_ok = 1;
1665
1666      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1667	   mode = GET_MODE_WIDER_MODE (mode))
1668	{
1669	  enum insn_code code = movstr_optab[(int) mode];
1670	  insn_operand_predicate_fn pred;
1671
1672	  if (code != CODE_FOR_nothing
1673	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1674		 here because if SIZE is less than the mode mask, as it is
1675		 returned by the macro, it will definitely be less than the
1676		 actual mode mask.  */
1677	      && ((GET_CODE (size) == CONST_INT
1678		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1679		       <= (GET_MODE_MASK (mode) >> 1)))
1680		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1681	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1682		  || (*pred) (x, BLKmode))
1683	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1684		  || (*pred) (y, BLKmode))
1685	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1686		  || (*pred) (opalign, VOIDmode)))
1687	    {
1688	      rtx op2;
1689	      rtx last = get_last_insn ();
1690	      rtx pat;
1691
1692	      op2 = convert_to_mode (mode, size, 1);
1693	      pred = insn_data[(int) code].operand[2].predicate;
1694	      if (pred != 0 && ! (*pred) (op2, mode))
1695		op2 = copy_to_mode_reg (mode, op2);
1696
1697	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1698	      if (pat)
1699		{
1700		  emit_insn (pat);
1701		  volatile_ok = 0;
1702		  return 0;
1703		}
1704	      else
1705		delete_insns_since (last);
1706	    }
1707	}
1708
1709      volatile_ok = 0;
1710
1711      /* X, Y, or SIZE may have been passed through protect_from_queue.
1712
1713	 It is unsafe to save the value generated by protect_from_queue
1714	 and reuse it later.  Consider what happens if emit_queue is
1715	 called before the return value from protect_from_queue is used.
1716
1717	 Expansion of the CALL_EXPR below will call emit_queue before
1718	 we are finished emitting RTL for argument setup.  So if we are
1719	 not careful we could get the wrong value for an argument.
1720
1721	 To avoid this problem we go ahead and emit code to copy X, Y &
1722	 SIZE into new pseudos.  We can then place those new pseudos
1723	 into an RTL_EXPR and use them later, even after a call to
1724	 emit_queue.
1725
1726	 Note this is not strictly needed for library calls since they
1727	 do not call emit_queue before loading their arguments.  However,
1728	 we may need to have library calls call emit_queue in the future
1729	 since failing to do so could cause problems for targets which
1730	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1731      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1732      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1733
1734#ifdef TARGET_MEM_FUNCTIONS
1735      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1736#else
1737      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1738			      TREE_UNSIGNED (integer_type_node));
1739      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1740#endif
1741
1742#ifdef TARGET_MEM_FUNCTIONS
1743      /* It is incorrect to use the libcall calling conventions to call
1744	 memcpy in this context.
1745
1746	 This could be a user call to memcpy and the user may wish to
1747	 examine the return value from memcpy.
1748
1749	 For targets where libcalls and normal calls have different conventions
1750	 for returning pointers, we could end up generating incorrect code.
1751
1752	 So instead of using a libcall sequence we build up a suitable
1753	 CALL_EXPR and expand the call in the normal fashion.  */
1754      if (fn == NULL_TREE)
1755	{
1756	  tree fntype;
1757
1758	  /* This was copied from except.c; I don't know whether all of it is
1759	     necessary in this context.  */
1760	  fn = get_identifier ("memcpy");
1761	  fntype = build_pointer_type (void_type_node);
1762	  fntype = build_function_type (fntype, NULL_TREE);
1763	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1764	  ggc_add_tree_root (&fn, 1);
1765	  DECL_EXTERNAL (fn) = 1;
1766	  TREE_PUBLIC (fn) = 1;
1767	  DECL_ARTIFICIAL (fn) = 1;
1768	  TREE_NOTHROW (fn) = 1;
1769	  make_decl_rtl (fn, NULL);
1770	  assemble_external (fn);
1771	}
1772
1773      /* We need to make an argument list for the function call.
1774
1775	 memcpy has three arguments, the first two are void * addresses and
1776	 the last is a size_t byte count for the copy.  */
1777      arg_list
1778	= build_tree_list (NULL_TREE,
1779			   make_tree (build_pointer_type (void_type_node), x));
1780      TREE_CHAIN (arg_list)
1781	= build_tree_list (NULL_TREE,
1782			   make_tree (build_pointer_type (void_type_node), y));
1783      TREE_CHAIN (TREE_CHAIN (arg_list))
1784	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1785      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1786
1787      /* Now we have to build up the CALL_EXPR itself.  */
1788      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1789      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1790			 call_expr, arg_list, NULL_TREE);
1791      TREE_SIDE_EFFECTS (call_expr) = 1;
1792
1793      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1794#else
1795      emit_library_call (bcopy_libfunc, LCT_NORMAL,
1796			 VOIDmode, 3, y, Pmode, x, Pmode,
1797			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1798					  TREE_UNSIGNED (integer_type_node)),
1799			 TYPE_MODE (integer_type_node));
1800#endif
1801
1802      /* If we are initializing a readonly value, show the above call
1803	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
1804	 from a loop.  */
1805      if (RTX_UNCHANGING_P (x))
1806	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1807    }
1808
1809  return retval;
1810}
1811
1812/* Copy all or part of a value X into registers starting at REGNO.
1813   The number of registers to be filled is NREGS.  */
1814
1815void
1816move_block_to_reg (regno, x, nregs, mode)
1817     int regno;
1818     rtx x;
1819     int nregs;
1820     enum machine_mode mode;
1821{
1822  int i;
1823#ifdef HAVE_load_multiple
1824  rtx pat;
1825  rtx last;
1826#endif
1827
1828  if (nregs == 0)
1829    return;
1830
1831  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832    x = validize_mem (force_const_mem (mode, x));
1833
1834  /* See if the machine can do this with a load multiple insn.  */
1835#ifdef HAVE_load_multiple
1836  if (HAVE_load_multiple)
1837    {
1838      last = get_last_insn ();
1839      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840			       GEN_INT (nregs));
1841      if (pat)
1842	{
1843	  emit_insn (pat);
1844	  return;
1845	}
1846      else
1847	delete_insns_since (last);
1848    }
1849#endif
1850
1851  for (i = 0; i < nregs; i++)
1852    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853		    operand_subword_force (x, i, mode));
1854}
1855
1856/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857   The number of registers to be filled is NREGS.  SIZE indicates the number
1858   of bytes in the object X.  */
1859
1860void
1861move_block_from_reg (regno, x, nregs, size)
1862     int regno;
1863     rtx x;
1864     int nregs;
1865     int size;
1866{
1867  int i;
1868#ifdef HAVE_store_multiple
1869  rtx pat;
1870  rtx last;
1871#endif
1872  enum machine_mode mode;
1873
1874  if (nregs == 0)
1875    return;
1876
1877  /* If SIZE is that of a mode no bigger than a word, just use that
1878     mode's store operation.  */
1879  if (size <= UNITS_PER_WORD
1880      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1881      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1882    {
1883      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1884      return;
1885    }
1886
1887  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1888     to the left before storing to memory.  Note that the previous test
1889     doesn't handle all cases (e.g. SIZE == 3).  */
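  /* For illustration: with SIZE == 3 and UNITS_PER_WORD == 4 the value is
     shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits, so the three
     significant bytes end up in the most significant (lowest-addressed)
     bytes of the word that is stored.  */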
1890  if (size < UNITS_PER_WORD
1891      && BYTES_BIG_ENDIAN
1892      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1893    {
1894      rtx tem = operand_subword (x, 0, 1, BLKmode);
1895      rtx shift;
1896
1897      if (tem == 0)
1898	abort ();
1899
1900      shift = expand_shift (LSHIFT_EXPR, word_mode,
1901			    gen_rtx_REG (word_mode, regno),
1902			    build_int_2 ((UNITS_PER_WORD - size)
1903					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1904      emit_move_insn (tem, shift);
1905      return;
1906    }
1907
1908  /* See if the machine can do this with a store multiple insn.  */
1909#ifdef HAVE_store_multiple
1910  if (HAVE_store_multiple)
1911    {
1912      last = get_last_insn ();
1913      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1914				GEN_INT (nregs));
1915      if (pat)
1916	{
1917	  emit_insn (pat);
1918	  return;
1919	}
1920      else
1921	delete_insns_since (last);
1922    }
1923#endif
1924
1925  for (i = 0; i < nregs; i++)
1926    {
1927      rtx tem = operand_subword (x, i, 1, BLKmode);
1928
1929      if (tem == 0)
1930	abort ();
1931
1932      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933    }
1934}
1935
1936/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1937   registers represented by a PARALLEL.  SSIZE represents the total size of
1938   block SRC in bytes, or -1 if not known.  */
1939/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1940   the balance will be in what would be the low-order memory addresses, i.e.
1941   left justified for big endian, right justified for little endian.  This
1942   happens to be true for the targets currently using this support.  If this
1943   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1944   would be needed.  */
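/* For illustration (registers and modes are target-dependent), a PARALLEL
   describing a value split across two registers at byte offsets 0 and 4
   might look like

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   where each element's first operand is the register and its second is the
   CONST_INT byte position within the block; a leading element whose first
   operand is NULL marks a parameter passed both on the stack and in
   registers.  */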
1945
1946void
1947emit_group_load (dst, orig_src, ssize)
1948     rtx dst, orig_src;
1949     int ssize;
1950{
1951  rtx *tmps, src;
1952  int start, i;
1953
1954  if (GET_CODE (dst) != PARALLEL)
1955    abort ();
1956
1957  /* Check for a NULL entry, used to indicate that the parameter goes
1958     both on the stack and in registers.  */
1959  if (XEXP (XVECEXP (dst, 0, 0), 0))
1960    start = 0;
1961  else
1962    start = 1;
1963
1964  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1965
1966  /* Process the pieces.  */
1967  for (i = start; i < XVECLEN (dst, 0); i++)
1968    {
1969      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1970      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1971      unsigned int bytelen = GET_MODE_SIZE (mode);
1972      int shift = 0;
1973
1974      /* Handle trailing fragments that run over the size of the struct.  */
1975      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976	{
1977	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1978	  bytelen = ssize - bytepos;
1979	  if (bytelen <= 0)
1980	    abort ();
1981	}
1982
1983      /* If we won't be loading directly from memory, protect the real source
1984	 from strange tricks we might play; but make sure that the source can
1985	 be loaded directly into the destination.  */
1986      src = orig_src;
1987      if (GET_CODE (orig_src) != MEM
1988	  && (!CONSTANT_P (orig_src)
1989	      || (GET_MODE (orig_src) != mode
1990		  && GET_MODE (orig_src) != VOIDmode)))
1991	{
1992	  if (GET_MODE (orig_src) == VOIDmode)
1993	    src = gen_reg_rtx (mode);
1994	  else
1995	    src = gen_reg_rtx (GET_MODE (orig_src));
1996
1997	  emit_move_insn (src, orig_src);
1998	}
1999
2000      /* Optimize the access just a bit.  */
2001      if (GET_CODE (src) == MEM
2002	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2003	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2004	  && bytelen == GET_MODE_SIZE (mode))
2005	{
2006	  tmps[i] = gen_reg_rtx (mode);
2007	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2008	}
2009      else if (GET_CODE (src) == CONCAT)
2010	{
2011	  if ((bytepos == 0
2012	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2013	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2014		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2015	    {
2016	      tmps[i] = XEXP (src, bytepos != 0);
2017	      if (! CONSTANT_P (tmps[i])
2018		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2019		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2020					     0, 1, NULL_RTX, mode, mode, ssize);
2021	    }
2022	  else if (bytepos == 0)
2023	    {
2024	      rtx mem = assign_stack_temp (GET_MODE (src),
2025					   GET_MODE_SIZE (GET_MODE (src)), 0);
2026	      emit_move_insn (mem, src);
2027	      tmps[i] = adjust_address (mem, mode, 0);
2028	    }
2029	  else
2030	    abort ();
2031	}
2032      else if (CONSTANT_P (src)
2033	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2034	tmps[i] = src;
2035      else
2036	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2037				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2038				     mode, mode, ssize);
2039
2040      if (BYTES_BIG_ENDIAN && shift)
2041	tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2042				tmps[i], 0, OPTAB_WIDEN);
2043    }
2044
2045  emit_queue ();
2046
2047  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2048  for (i = start; i < XVECLEN (dst, 0); i++)
2049    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2050}
2051
2052/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2053   registers represented by a PARALLEL.  SSIZE represents the total size of
2054   block DST, or -1 if not known.  */
2055
2056void
2057emit_group_store (orig_dst, src, ssize)
2058     rtx orig_dst, src;
2059     int ssize;
2060{
2061  rtx *tmps, dst;
2062  int start, i;
2063
2064  if (GET_CODE (src) != PARALLEL)
2065    abort ();
2066
2067  /* Check for a NULL entry, used to indicate that the parameter goes
2068     both on the stack and in registers.  */
2069  if (XEXP (XVECEXP (src, 0, 0), 0))
2070    start = 0;
2071  else
2072    start = 1;
2073
2074  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2075
2076  /* Copy the (probable) hard regs into pseudos.  */
2077  for (i = start; i < XVECLEN (src, 0); i++)
2078    {
2079      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2080      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2081      emit_move_insn (tmps[i], reg);
2082    }
2083  emit_queue ();
2084
2085  /* If we won't be storing directly into memory, protect the real destination
2086     from strange tricks we might play.  */
2087  dst = orig_dst;
2088  if (GET_CODE (dst) == PARALLEL)
2089    {
2090      rtx temp;
2091
2092      /* We can get a PARALLEL dst if there is a conditional expression in
2093	 a return statement.  In that case, the dst and src are the same,
2094	 so no action is necessary.  */
2095      if (rtx_equal_p (dst, src))
2096	return;
2097
2098      /* It is unclear if we can ever reach here, but we may as well handle
2099	 it.  Allocate a temporary, and split this into a store/load to/from
2100	 the temporary.  */
2101
2102      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2103      emit_group_store (temp, src, ssize);
2104      emit_group_load (dst, temp, ssize);
2105      return;
2106    }
2107  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2108    {
2109      dst = gen_reg_rtx (GET_MODE (orig_dst));
2110      /* Make life a bit easier for combine.  */
2111      emit_move_insn (dst, const0_rtx);
2112    }
2113
2114  /* Process the pieces.  */
2115  for (i = start; i < XVECLEN (src, 0); i++)
2116    {
2117      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2118      enum machine_mode mode = GET_MODE (tmps[i]);
2119      unsigned int bytelen = GET_MODE_SIZE (mode);
2120      rtx dest = dst;
2121
2122      /* Handle trailing fragments that run over the size of the struct.  */
2123      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2124	{
2125	  if (BYTES_BIG_ENDIAN)
2126	    {
2127	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2128	      tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2129				      tmps[i], 0, OPTAB_WIDEN);
2130	    }
2131	  bytelen = ssize - bytepos;
2132	}
2133
2134      if (GET_CODE (dst) == CONCAT)
2135	{
2136	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2137	    dest = XEXP (dst, 0);
2138	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2139	    {
2140	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2141	      dest = XEXP (dst, 1);
2142	    }
2143	  else
2144	    abort ();
2145	}
2146
2147      /* Optimize the access just a bit.  */
2148      if (GET_CODE (dest) == MEM
2149	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2150	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2151	  && bytelen == GET_MODE_SIZE (mode))
2152	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2153      else
2154	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2155			 mode, tmps[i], ssize);
2156    }
2157
2158  emit_queue ();
2159
2160  /* Copy from the pseudo into the (probable) hard reg.  */
2161  if (GET_CODE (dst) == REG)
2162    emit_move_insn (orig_dst, dst);
2163}
2164
2165/* Generate code to copy a BLKmode object of TYPE out of a
2166   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2167   is null, a stack temporary is created.  TGTBLK is returned.
2168
2169   The primary purpose of this routine is to handle functions
2170   that return BLKmode structures in registers.  Some machines
2171   (the PA for example) want to return all small structures
2172   in registers regardless of the structure's alignment.  */
2173
2174rtx
2175copy_blkmode_from_reg (tgtblk, srcreg, type)
2176     rtx tgtblk;
2177     rtx srcreg;
2178     tree type;
2179{
2180  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2181  rtx src = NULL, dst = NULL;
2182  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2183  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2184
2185  if (tgtblk == 0)
2186    {
2187      tgtblk = assign_temp (build_qualified_type (type,
2188						  (TYPE_QUALS (type)
2189						   | TYPE_QUAL_CONST)),
2190			    0, 1, 1);
2191      preserve_temp_slots (tgtblk);
2192    }
2193
2194  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2195     into a new pseudo which is a full word.
2196
2197     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2198     the wrong part of the register gets copied so we fake a type conversion
2199     in place.  */
2200  if (GET_MODE (srcreg) != BLKmode
2201      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2202    {
2203      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2204	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2205      else
2206	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2207    }
2208
2209  /* Structures whose size is not a multiple of a word are aligned
2210     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2211     machine, this means we must skip the empty high order bytes when
2212     calculating the bit offset.  */
2213  if (BYTES_BIG_ENDIAN
2214      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2215      && bytes % UNITS_PER_WORD)
2216    big_endian_correction
2217      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
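  /* For illustration: with bytes == 6 and UNITS_PER_WORD == 4 (so
     BITS_PER_WORD == 32), bytes % UNITS_PER_WORD == 2 and the correction is
     32 - 2 * BITS_PER_UNIT == 16 bits, i.e. the extraction below starts
     halfway into the first source word.  */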
2218
2219	  /* Copy the structure BITSIZE bits at a time.
2220
2221     We could probably emit more efficient code for machines which do not use
2222     strict alignment, but it doesn't seem worth the effort at the current
2223     time.  */
2224  for (bitpos = 0, xbitpos = big_endian_correction;
2225       bitpos < bytes * BITS_PER_UNIT;
2226       bitpos += bitsize, xbitpos += bitsize)
2227    {
2228      /* We need a new source operand each time xbitpos is on a
2229	 word boundary and when xbitpos == big_endian_correction
2230	 (the first time through).  */
2231      if (xbitpos % BITS_PER_WORD == 0
2232	  || xbitpos == big_endian_correction)
2233	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2234				     GET_MODE (srcreg));
2235
2236      /* We need a new destination operand each time bitpos is on
2237	 a word boundary.  */
2238      if (bitpos % BITS_PER_WORD == 0)
2239	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2240
2241      /* Use xbitpos for the source extraction (right justified) and
2242	 bitpos for the destination store (left justified).  */
2243      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2244		       extract_bit_field (src, bitsize,
2245					  xbitpos % BITS_PER_WORD, 1,
2246					  NULL_RTX, word_mode, word_mode,
2247					  BITS_PER_WORD),
2248		       BITS_PER_WORD);
2249    }
2250
2251  return tgtblk;
2252}
2253
2254/* Add a USE expression for REG to the (possibly empty) list pointed
2255   to by CALL_FUSAGE.  REG must denote a hard register.  */
2256
2257void
2258use_reg (call_fusage, reg)
2259     rtx *call_fusage, reg;
2260{
2261  if (GET_CODE (reg) != REG
2262      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2263    abort ();
2264
2265  *call_fusage
2266    = gen_rtx_EXPR_LIST (VOIDmode,
2267			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2268}
2269
2270/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2271   starting at REGNO.  All of these registers must be hard registers.  */
2272
2273void
2274use_regs (call_fusage, regno, nregs)
2275     rtx *call_fusage;
2276     int regno;
2277     int nregs;
2278{
2279  int i;
2280
2281  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2282    abort ();
2283
2284  for (i = 0; i < nregs; i++)
2285    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2286}
2287
2288/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2289   PARALLEL REGS.  This is for calls that pass values in multiple
2290   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2291
2292void
2293use_group_regs (call_fusage, regs)
2294     rtx *call_fusage;
2295     rtx regs;
2296{
2297  int i;
2298
2299  for (i = 0; i < XVECLEN (regs, 0); i++)
2300    {
2301      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2302
2303      /* A NULL entry means the parameter goes both on the stack and in
2304	 registers.  This can also be a MEM for targets that pass values
2305	 partially on the stack and partially in registers.  */
2306      if (reg != 0 && GET_CODE (reg) == REG)
2307	use_reg (call_fusage, reg);
2308    }
2309}
2310
2311
2312int
2313can_store_by_pieces (len, constfun, constfundata, align)
2314     unsigned HOST_WIDE_INT len;
2315     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2316     PTR constfundata;
2317     unsigned int align;
2318{
2319  unsigned HOST_WIDE_INT max_size, l;
2320  HOST_WIDE_INT offset = 0;
2321  enum machine_mode mode, tmode;
2322  enum insn_code icode;
2323  int reverse;
2324  rtx cst;
2325
2326  if (! MOVE_BY_PIECES_P (len, align))
2327    return 0;
2328
2329  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2330      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2331    align = MOVE_MAX * BITS_PER_UNIT;
2332
2333  /* We would first store what we can in the largest integer mode, then go to
2334     successively smaller modes.  */
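  /* Note that the loop below only simulates the stores: each piece's
     constant is fetched from CONSTFUN and checked with
     LEGITIMATE_CONSTANT_P, once for the forward order and (if
     auto-decrement addressing exists) once for the reverse order; no insns
     are emitted here.  */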
2335
2336  for (reverse = 0;
2337       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2338       reverse++)
2339    {
2340      l = len;
2341      mode = VOIDmode;
2342      max_size = MOVE_MAX_PIECES + 1;
2343      while (max_size > 1)
2344	{
2345	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2346	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2347	    if (GET_MODE_SIZE (tmode) < max_size)
2348	      mode = tmode;
2349
2350	  if (mode == VOIDmode)
2351	    break;
2352
2353	  icode = mov_optab->handlers[(int) mode].insn_code;
2354	  if (icode != CODE_FOR_nothing
2355	      && align >= GET_MODE_ALIGNMENT (mode))
2356	    {
2357	      unsigned int size = GET_MODE_SIZE (mode);
2358
2359	      while (l >= size)
2360		{
2361		  if (reverse)
2362		    offset -= size;
2363
2364		  cst = (*constfun) (constfundata, offset, mode);
2365		  if (!LEGITIMATE_CONSTANT_P (cst))
2366		    return 0;
2367
2368		  if (!reverse)
2369		    offset += size;
2370
2371		  l -= size;
2372		}
2373	    }
2374
2375	  max_size = GET_MODE_SIZE (mode);
2376	}
2377
2378      /* The code above should have handled everything.  */
2379      if (l != 0)
2380	abort ();
2381    }
2382
2383  return 1;
2384}
2385
2386/* Generate several move instructions to store LEN bytes generated by
2387   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2388   pointer which will be passed as argument in every CONSTFUN call.
2389   ALIGN is maximum alignment we can assume.  */
2390
2391void
2392store_by_pieces (to, len, constfun, constfundata, align)
2393     rtx to;
2394     unsigned HOST_WIDE_INT len;
2395     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2396     PTR constfundata;
2397     unsigned int align;
2398{
2399  struct store_by_pieces data;
2400
2401  if (! MOVE_BY_PIECES_P (len, align))
2402    abort ();
2403  to = protect_from_queue (to, 1);
2404  data.constfun = constfun;
2405  data.constfundata = constfundata;
2406  data.len = len;
2407  data.to = to;
2408  store_by_pieces_1 (&data, align);
2409}
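
/* For illustration only: a CONSTFUN that replicates a single byte across
   each piece might look like the hypothetical sketch below (written in this
   file's pre-ISO style; the helper is not defined in this file and is only
   meant to show the expected interface).

	static rtx
	repeat_byte_constfun (data, offset, mode)
	     PTR data;
	     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
	     enum machine_mode mode;
	{
	  unsigned char b = *(unsigned char *) data;
	  HOST_WIDE_INT c = 0;
	  unsigned int i;

	  for (i = 0; i < GET_MODE_SIZE (mode); i++)
	    c = (c << BITS_PER_UNIT) | b;
	  return GEN_INT (trunc_int_for_mode (c, mode));
	}

   A caller would first check
   can_store_by_pieces (len, repeat_byte_constfun, (PTR) &byte, align)
   and then call
   store_by_pieces (to, len, repeat_byte_constfun, (PTR) &byte, align).  */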
2410
2411/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2412   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2413   before calling. ALIGN is maximum alignment we can assume.  */
2414
2415static void
2416clear_by_pieces (to, len, align)
2417     rtx to;
2418     unsigned HOST_WIDE_INT len;
2419     unsigned int align;
2420{
2421  struct store_by_pieces data;
2422
2423  data.constfun = clear_by_pieces_1;
2424  data.constfundata = NULL;
2425  data.len = len;
2426  data.to = to;
2427  store_by_pieces_1 (&data, align);
2428}
2429
2430/* Callback routine for clear_by_pieces.
2431   Return const0_rtx unconditionally.  */
2432
2433static rtx
2434clear_by_pieces_1 (data, offset, mode)
2435     PTR data ATTRIBUTE_UNUSED;
2436     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2437     enum machine_mode mode ATTRIBUTE_UNUSED;
2438{
2439  return const0_rtx;
2440}
2441
2442/* Subroutine of clear_by_pieces and store_by_pieces.
2443   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2444   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2445   before calling.  ALIGN is maximum alignment we can assume.  */
2446
2447static void
2448store_by_pieces_1 (data, align)
2449     struct store_by_pieces *data;
2450     unsigned int align;
2451{
2452  rtx to_addr = XEXP (data->to, 0);
2453  unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2454  enum machine_mode mode = VOIDmode, tmode;
2455  enum insn_code icode;
2456
2457  data->offset = 0;
2458  data->to_addr = to_addr;
2459  data->autinc_to
2460    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2461       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2462
2463  data->explicit_inc_to = 0;
2464  data->reverse
2465    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2466  if (data->reverse)
2467    data->offset = data->len;
2468
2469  /* If storing requires more than two move insns,
2470     copy addresses to registers (to make displacements shorter)
2471     and use post-increment if available.  */
2472  if (!data->autinc_to
2473      && move_by_pieces_ninsns (data->len, align) > 2)
2474    {
2475      /* Determine the main mode we'll be using.  */
2476      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2477	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2478	if (GET_MODE_SIZE (tmode) < max_size)
2479	  mode = tmode;
2480
2481      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2482	{
2483	  data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2484	  data->autinc_to = 1;
2485	  data->explicit_inc_to = -1;
2486	}
2487
2488      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2489	  && ! data->autinc_to)
2490	{
2491	  data->to_addr = copy_addr_to_reg (to_addr);
2492	  data->autinc_to = 1;
2493	  data->explicit_inc_to = 1;
2494	}
2495
2496      if (! data->autinc_to && CONSTANT_P (to_addr))
2497	data->to_addr = copy_addr_to_reg (to_addr);
2498    }
2499
2500  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2501      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2502    align = MOVE_MAX * BITS_PER_UNIT;
2503
2504  /* First store what we can in the largest integer mode, then go to
2505     successively smaller modes.  */
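  /* For illustration: with len == 7 and MOVE_MAX_PIECES == 4 on a 32-bit
     target, and assuming the move patterns exist and ALIGN permits, this
     loop emits one SImode store (4 bytes), then one HImode store (2 bytes),
     then one QImode store (1 byte).  */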
2506
2507  while (max_size > 1)
2508    {
2509      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2510	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2511	if (GET_MODE_SIZE (tmode) < max_size)
2512	  mode = tmode;
2513
2514      if (mode == VOIDmode)
2515	break;
2516
2517      icode = mov_optab->handlers[(int) mode].insn_code;
2518      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2519	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2520
2521      max_size = GET_MODE_SIZE (mode);
2522    }
2523
2524  /* The code above should have handled everything.  */
2525  if (data->len != 0)
2526    abort ();
2527}
2528
2529/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2530   with move instructions for mode MODE.  GENFUN is the gen_... function
2531   to make a move insn for that mode.  DATA has all the other info.  */
2532
2533static void
2534store_by_pieces_2 (genfun, mode, data)
2535     rtx (*genfun) PARAMS ((rtx, ...));
2536     enum machine_mode mode;
2537     struct store_by_pieces *data;
2538{
2539  unsigned int size = GET_MODE_SIZE (mode);
2540  rtx to1, cst;
2541
2542  while (data->len >= size)
2543    {
2544      if (data->reverse)
2545	data->offset -= size;
2546
2547      if (data->autinc_to)
2548	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2549					 data->offset);
2550      else
2551	to1 = adjust_address (data->to, mode, data->offset);
2552
2553      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2554	emit_insn (gen_add2_insn (data->to_addr,
2555				  GEN_INT (-(HOST_WIDE_INT) size)));
2556
2557      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2558      emit_insn ((*genfun) (to1, cst));
2559
2560      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2561	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2562
2563      if (! data->reverse)
2564	data->offset += size;
2565
2566      data->len -= size;
2567    }
2568}
2569
2570/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2571   its length in bytes.  */
2572
2573rtx
2574clear_storage (object, size)
2575     rtx object;
2576     rtx size;
2577{
2578#ifdef TARGET_MEM_FUNCTIONS
2579  static tree fn;
2580  tree call_expr, arg_list;
2581#endif
2582  rtx retval = 0;
2583  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2584			: GET_MODE_ALIGNMENT (GET_MODE (object)));
2585
2586  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2587     just move a zero.  Otherwise, do this a piece at a time.  */
2588  if (GET_MODE (object) != BLKmode
2589      && GET_CODE (size) == CONST_INT
2590      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2591    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2592  else
2593    {
2594      object = protect_from_queue (object, 1);
2595      size = protect_from_queue (size, 0);
2596
2597      if (GET_CODE (size) == CONST_INT
2598	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2599	clear_by_pieces (object, INTVAL (size), align);
2600      else
2601	{
2602	  /* Try the most limited insn first, because there's no point
2603	     including more than one in the machine description unless
2604	     the more limited one has some advantage.  */
2605
2606	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2607	  enum machine_mode mode;
2608
2609	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2610	       mode = GET_MODE_WIDER_MODE (mode))
2611	    {
2612	      enum insn_code code = clrstr_optab[(int) mode];
2613	      insn_operand_predicate_fn pred;
2614
2615	      if (code != CODE_FOR_nothing
2616		  /* We don't need MODE to be narrower than
2617		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2618		     the mode mask, as it is returned by the macro, it will
2619		     definitely be less than the actual mode mask.  */
2620		  && ((GET_CODE (size) == CONST_INT
2621		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2622			   <= (GET_MODE_MASK (mode) >> 1)))
2623		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2624		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2625		      || (*pred) (object, BLKmode))
2626		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2627		      || (*pred) (opalign, VOIDmode)))
2628		{
2629		  rtx op1;
2630		  rtx last = get_last_insn ();
2631		  rtx pat;
2632
2633		  op1 = convert_to_mode (mode, size, 1);
2634		  pred = insn_data[(int) code].operand[1].predicate;
2635		  if (pred != 0 && ! (*pred) (op1, mode))
2636		    op1 = copy_to_mode_reg (mode, op1);
2637
2638		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2639		  if (pat)
2640		    {
2641		      emit_insn (pat);
2642		      return 0;
2643		    }
2644		  else
2645		    delete_insns_since (last);
2646		}
2647	    }
2648
2649	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2650
2651	     It is unsafe to save the value generated by protect_from_queue
2652	     and reuse it later.  Consider what happens if emit_queue is
2653	     called before the return value from protect_from_queue is used.
2654
2655	     Expansion of the CALL_EXPR below will call emit_queue before
2656	     we are finished emitting RTL for argument setup.  So if we are
2657	     not careful we could get the wrong value for an argument.
2658
2659	     To avoid this problem we go ahead and emit code to copy OBJECT
2660	     and SIZE into new pseudos.  We can then place those new pseudos
2661	     into an RTL_EXPR and use them later, even after a call to
2662	     emit_queue.
2663
2664	     Note this is not strictly needed for library calls since they
2665	     do not call emit_queue before loading their arguments.  However,
2666	     we may need to have library calls call emit_queue in the future
2667	     since failing to do so could cause problems for targets which
2668	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2669	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2670
2671#ifdef TARGET_MEM_FUNCTIONS
2672	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2673#else
2674	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2675				  TREE_UNSIGNED (integer_type_node));
2676	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2677#endif
2678
2679#ifdef TARGET_MEM_FUNCTIONS
2680	  /* It is incorrect to use the libcall calling conventions to call
2681	     memset in this context.
2682
2683	     This could be a user call to memset and the user may wish to
2684	     examine the return value from memset.
2685
2686	     For targets where libcalls and normal calls have different
2687	     conventions for returning pointers, we could end up generating
2688	     incorrect code.
2689
2690	     So instead of using a libcall sequence we build up a suitable
2691	     CALL_EXPR and expand the call in the normal fashion.  */
2692	  if (fn == NULL_TREE)
2693	    {
2694	      tree fntype;
2695
2696	      /* This was copied from except.c, I don't know if all this is
2697		 necessary in this context or not.  */
2698	      fn = get_identifier ("memset");
2699	      fntype = build_pointer_type (void_type_node);
2700	      fntype = build_function_type (fntype, NULL_TREE);
2701	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2702	      ggc_add_tree_root (&fn, 1);
2703	      DECL_EXTERNAL (fn) = 1;
2704	      TREE_PUBLIC (fn) = 1;
2705	      DECL_ARTIFICIAL (fn) = 1;
2706	      TREE_NOTHROW (fn) = 1;
2707	      make_decl_rtl (fn, NULL);
2708	      assemble_external (fn);
2709	    }
2710
2711	  /* We need to make an argument list for the function call.
2712
2713	     memset has three arguments: the first is a void * address, the
2714	     second an integer with the initialization value, and the last is
2715	     a size_t byte count.  */
2716	  arg_list
2717	    = build_tree_list (NULL_TREE,
2718			       make_tree (build_pointer_type (void_type_node),
2719					  object));
2720	  TREE_CHAIN (arg_list)
2721	    = build_tree_list (NULL_TREE,
2722			       make_tree (integer_type_node, const0_rtx));
2723	  TREE_CHAIN (TREE_CHAIN (arg_list))
2724	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2725	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2726
2727	  /* Now we have to build up the CALL_EXPR itself.  */
2728	  call_expr = build1 (ADDR_EXPR,
2729			      build_pointer_type (TREE_TYPE (fn)), fn);
2730	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2731			     call_expr, arg_list, NULL_TREE);
2732	  TREE_SIDE_EFFECTS (call_expr) = 1;
2733
2734	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2735#else
2736	  emit_library_call (bzero_libfunc, LCT_NORMAL,
2737			     VOIDmode, 2, object, Pmode, size,
2738			     TYPE_MODE (integer_type_node));
2739#endif
2740
2741	  /* If we are initializing a readonly value, show the above call
2742	     clobbered it.  Otherwise, a load from it may erroneously be
2743	     hoisted from a loop.  */
2744	  if (RTX_UNCHANGING_P (object))
2745	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2746	}
2747    }
2748
2749  return retval;
2750}
2751
2752/* Generate code to copy Y into X.
2753   Both Y and X must have the same mode, except that
2754   Y can be a constant with VOIDmode.
2755   This mode cannot be BLKmode; use emit_block_move for that.
2756
2757   Return the last instruction emitted.  */
2758
2759rtx
2760emit_move_insn (x, y)
2761     rtx x, y;
2762{
2763  enum machine_mode mode = GET_MODE (x);
2764  rtx y_cst = NULL_RTX;
2765  rtx last_insn;
2766
2767  x = protect_from_queue (x, 1);
2768  y = protect_from_queue (y, 0);
2769
2770  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2771    abort ();
2772
2773  /* Never force constant_p_rtx to memory.  */
2774  if (GET_CODE (y) == CONSTANT_P_RTX)
2775    ;
2776  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2777    {
2778      y_cst = y;
2779      y = force_const_mem (mode, y);
2780    }
2781
2782  /* If X or Y are memory references, verify that their addresses are valid
2783     for the machine.  */
2784  if (GET_CODE (x) == MEM
2785      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2786	   && ! push_operand (x, GET_MODE (x)))
2787	  || (flag_force_addr
2788	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2789    x = validize_mem (x);
2790
2791  if (GET_CODE (y) == MEM
2792      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2793	  || (flag_force_addr
2794	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2795    y = validize_mem (y);
2796
2797  if (mode == BLKmode)
2798    abort ();
2799
2800  last_insn = emit_move_insn_1 (x, y);
2801
2802  if (y_cst && GET_CODE (x) == REG)
2803    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2804
2805  return last_insn;
2806}
2807
2808/* Low level part of emit_move_insn.
2809   Called just like emit_move_insn, but assumes X and Y
2810   are basically valid.  */
2811
2812rtx
2813emit_move_insn_1 (x, y)
2814     rtx x, y;
2815{
2816  enum machine_mode mode = GET_MODE (x);
2817  enum machine_mode submode;
2818  enum mode_class class = GET_MODE_CLASS (mode);
2819
2820  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2821    abort ();
2822
2823  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2824    return
2825      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2826
2827  /* Expand complex moves by moving real part and imag part, if possible.  */
2828  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2829	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2830						    * BITS_PER_UNIT),
2831						   (class == MODE_COMPLEX_INT
2832						    ? MODE_INT : MODE_FLOAT),
2833						   0))
2834	   && (mov_optab->handlers[(int) submode].insn_code
2835	       != CODE_FOR_nothing))
2836    {
2837      /* Don't split destination if it is a stack push.  */
2838      int stack = push_operand (x, GET_MODE (x));
2839
2840#ifdef PUSH_ROUNDING
2841      /* In case we output to the stack, but the size is smaller than the
2842	 machine can push exactly, we need to use move instructions.  */
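      /* For illustration: if the halves are SFmode (4 bytes) but every push
	 is rounded up to, say, 8 bytes, two separate pushes would leave a
	 4-byte gap between the real and imaginary parts, so we adjust the
	 stack once and store both parts at explicit offsets instead.  */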
2843      if (stack
2844	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2845	      != GET_MODE_SIZE (submode)))
2846	{
2847	  rtx temp;
2848	  HOST_WIDE_INT offset1, offset2;
2849
2850	  /* Do not use anti_adjust_stack, since we don't want to update
2851	     stack_pointer_delta.  */
2852	  temp = expand_binop (Pmode,
2853#ifdef STACK_GROWS_DOWNWARD
2854			       sub_optab,
2855#else
2856			       add_optab,
2857#endif
2858			       stack_pointer_rtx,
2859			       GEN_INT
2860				 (PUSH_ROUNDING
2861				  (GET_MODE_SIZE (GET_MODE (x)))),
2862			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2863
2864	  if (temp != stack_pointer_rtx)
2865	    emit_move_insn (stack_pointer_rtx, temp);
2866
2867#ifdef STACK_GROWS_DOWNWARD
2868	  offset1 = 0;
2869	  offset2 = GET_MODE_SIZE (submode);
2870#else
2871	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2872	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2873		     + GET_MODE_SIZE (submode));
2874#endif
2875
2876	  emit_move_insn (change_address (x, submode,
2877					  gen_rtx_PLUS (Pmode,
2878						        stack_pointer_rtx,
2879							GEN_INT (offset1))),
2880			  gen_realpart (submode, y));
2881	  emit_move_insn (change_address (x, submode,
2882					  gen_rtx_PLUS (Pmode,
2883						        stack_pointer_rtx,
2884							GEN_INT (offset2))),
2885			  gen_imagpart (submode, y));
2886	}
2887      else
2888#endif
2889      /* If this is a stack push, push the highpart first so it
2890	 will be in the argument order.
2891
2892	 In that case, change_address is used only to convert
2893	 the mode, not to change the address.  */
2894      if (stack)
2895	{
2896	  /* Note that the real part always precedes the imag part in memory
2897	     regardless of the machine's endianness.  */
2898#ifdef STACK_GROWS_DOWNWARD
2899	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2900		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2901		      gen_imagpart (submode, y)));
2902	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2903		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2904		      gen_realpart (submode, y)));
2905#else
2906	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2907		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2908		      gen_realpart (submode, y)));
2909	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2910		     (gen_rtx_MEM (submode, XEXP (x, 0)),
2911		      gen_imagpart (submode, y)));
2912#endif
2913	}
2914      else
2915	{
2916	  rtx realpart_x, realpart_y;
2917	  rtx imagpart_x, imagpart_y;
2918
2919	  /* If this is a complex value with each part being smaller than a
2920	     word, the usual calling sequence will likely pack the pieces into
2921	     a single register.  Unfortunately, SUBREG of hard registers only
2922	     deals in terms of words, so we have a problem converting input
2923	     arguments to the CONCAT of two registers that is used elsewhere
2924	     for complex values.  If this is before reload, we can copy it into
2925	     memory and reload.  FIXME, we should see about using extract and
2926	     insert on integer registers, but complex short and complex char
2927	     variables should be rarely used.  */
2928	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2929	      && (reload_in_progress | reload_completed) == 0)
2930	    {
2931	      int packed_dest_p
2932		= (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2933	      int packed_src_p
2934		= (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2935
2936	      if (packed_dest_p || packed_src_p)
2937		{
2938		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2939					       ? MODE_FLOAT : MODE_INT);
2940
2941		  enum machine_mode reg_mode
2942		    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2943
2944		  if (reg_mode != BLKmode)
2945		    {
2946		      rtx mem = assign_stack_temp (reg_mode,
2947						   GET_MODE_SIZE (mode), 0);
2948		      rtx cmem = adjust_address (mem, mode, 0);
2949
2950		      cfun->cannot_inline
2951			= N_("function using short complex types cannot be inline");
2952
2953		      if (packed_dest_p)
2954			{
2955			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2956
2957			  emit_move_insn_1 (cmem, y);
2958			  return emit_move_insn_1 (sreg, mem);
2959			}
2960		      else
2961			{
2962			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2963
2964			  emit_move_insn_1 (mem, sreg);
2965			  return emit_move_insn_1 (x, cmem);
2966			}
2967		    }
2968		}
2969	    }
2970
2971	  realpart_x = gen_realpart (submode, x);
2972	  realpart_y = gen_realpart (submode, y);
2973	  imagpart_x = gen_imagpart (submode, x);
2974	  imagpart_y = gen_imagpart (submode, y);
2975
2976	  /* Show the output dies here.  This is necessary for SUBREGs
2977	     of pseudos since we cannot track their lifetimes correctly;
2978	     hard regs shouldn't appear here except as return values.
2979	     We never want to emit such a clobber after reload.  */
2980	  if (x != y
2981	      && ! (reload_in_progress || reload_completed)
2982	      && (GET_CODE (realpart_x) == SUBREG
2983		  || GET_CODE (imagpart_x) == SUBREG))
2984	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2985
2986	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2987		     (realpart_x, realpart_y));
2988	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2989		     (imagpart_x, imagpart_y));
2990	}
2991
2992      return get_last_insn ();
2993    }
2994
2995  /* This will handle any multi-word mode that lacks a move_insn pattern.
2996     However, you will get better code if you define such patterns,
2997     even if they must turn into multiple assembler instructions.  */
2998  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2999    {
3000      rtx last_insn = 0;
3001      rtx seq, inner;
3002      int need_clobber;
3003      int i;
3004
3005#ifdef PUSH_ROUNDING
3006
3007      /* If X is a push on the stack, do the push now and replace
3008	 X with a reference to the stack pointer.  */
3009      if (push_operand (x, GET_MODE (x)))
3010	{
3011	  rtx temp;
3012	  enum rtx_code code;
3013
3014	  /* Do not use anti_adjust_stack, since we don't want to update
3015	     stack_pointer_delta.  */
3016	  temp = expand_binop (Pmode,
3017#ifdef STACK_GROWS_DOWNWARD
3018			       sub_optab,
3019#else
3020			       add_optab,
3021#endif
3022			       stack_pointer_rtx,
3023			       GEN_INT
3024				 (PUSH_ROUNDING
3025				  (GET_MODE_SIZE (GET_MODE (x)))),
3026			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3027
3028          if (temp != stack_pointer_rtx)
3029            emit_move_insn (stack_pointer_rtx, temp);
3030
3031	  code = GET_CODE (XEXP (x, 0));
3032
3033	  /* Just hope that small offsets off SP are OK.  */
3034	  if (code == POST_INC)
3035	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3036				GEN_INT (-((HOST_WIDE_INT)
3037					   GET_MODE_SIZE (GET_MODE (x)))));
3038	  else if (code == POST_DEC)
3039	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3040				GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3041	  else
3042	    temp = stack_pointer_rtx;
3043
3044	  x = change_address (x, VOIDmode, temp);
3045	}
3046#endif
3047
3048      /* If we are in reload, see if either operand is a MEM whose address
3049	 is scheduled for replacement.  */
3050      if (reload_in_progress && GET_CODE (x) == MEM
3051	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3052	x = replace_equiv_address_nv (x, inner);
3053      if (reload_in_progress && GET_CODE (y) == MEM
3054	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3055	y = replace_equiv_address_nv (y, inner);
3056
3057      start_sequence ();
3058
3059      need_clobber = 0;
3060      for (i = 0;
3061	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3062	   i++)
3063	{
3064	  rtx xpart = operand_subword (x, i, 1, mode);
3065	  rtx ypart = operand_subword (y, i, 1, mode);
3066
3067	  /* If we can't get a part of Y, put Y into memory if it is a
3068	     constant.  Otherwise, force it into a register.  If we still
3069	     can't get a part of Y, abort.  */
3070	  if (ypart == 0 && CONSTANT_P (y))
3071	    {
3072	      y = force_const_mem (mode, y);
3073	      ypart = operand_subword (y, i, 1, mode);
3074	    }
3075	  else if (ypart == 0)
3076	    ypart = operand_subword_force (y, i, mode);
3077
3078	  if (xpart == 0 || ypart == 0)
3079	    abort ();
3080
3081	  need_clobber |= (GET_CODE (xpart) == SUBREG);
3082
3083	  last_insn = emit_move_insn (xpart, ypart);
3084	}
3085
3086      seq = gen_sequence ();
3087      end_sequence ();
3088
3089      /* Show the output dies here.  This is necessary for SUBREGs
3090	 of pseudos since we cannot track their lifetimes correctly;
3091	 hard regs shouldn't appear here except as return values.
3092	 We never want to emit such a clobber after reload.  */
3093      if (x != y
3094	  && ! (reload_in_progress || reload_completed)
3095	  && need_clobber != 0)
3096	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3097
3098      emit_insn (seq);
3099
3100      return last_insn;
3101    }
3102  else
3103    abort ();
3104}
3105
3106/* Pushing data onto the stack.  */
3107
3108/* Push a block of length SIZE (perhaps variable)
3109   and return an rtx to address the beginning of the block.
3110   Note that it is not possible for the value returned to be a QUEUED.
3111   The value may be virtual_outgoing_args_rtx.
3112
3113   EXTRA is the number of bytes of padding to push in addition to SIZE.
3114   BELOW nonzero means this padding comes at low addresses;
3115   otherwise, the padding comes at high addresses.  */
3116
3117rtx
3118push_block (size, extra, below)
3119     rtx size;
3120     int extra, below;
3121{
3122  rtx temp;
3123
3124  size = convert_modes (Pmode, ptr_mode, size, 1);
3125  if (CONSTANT_P (size))
3126    anti_adjust_stack (plus_constant (size, extra));
3127  else if (GET_CODE (size) == REG && extra == 0)
3128    anti_adjust_stack (size);
3129  else
3130    {
3131      temp = copy_to_mode_reg (Pmode, size);
3132      if (extra != 0)
3133	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3134			     temp, 0, OPTAB_LIB_WIDEN);
3135      anti_adjust_stack (temp);
3136    }
3137
3138#ifndef STACK_GROWS_DOWNWARD
3139  if (0)
3140#else
3141  if (1)
3142#endif
3143    {
3144      temp = virtual_outgoing_args_rtx;
3145      if (extra != 0 && below)
3146	temp = plus_constant (temp, extra);
3147    }
3148  else
3149    {
3150      if (GET_CODE (size) == CONST_INT)
3151	temp = plus_constant (virtual_outgoing_args_rtx,
3152			      -INTVAL (size) - (below ? 0 : extra));
3153      else if (extra != 0 && !below)
3154	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3155			     negate_rtx (Pmode, plus_constant (size, extra)));
3156      else
3157	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3158			     negate_rtx (Pmode, size));
3159    }
3160
3161  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3162}
3163
3164#ifdef PUSH_ROUNDING
3165
3166/* Emit single push insn.  */
3167
3168static void
3169emit_single_push_insn (mode, x, type)
3170     rtx x;
3171     enum machine_mode mode;
3172     tree type;
3173{
3174  rtx dest_addr;
3175  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3176  rtx dest;
3177  enum insn_code icode;
3178  insn_operand_predicate_fn pred;
3179
3180  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3181  /* If there is a push pattern, use it.  Otherwise fall back to the old way
3182     of handing a MEM representing the push operation to the move expander.  */
3183  icode = push_optab->handlers[(int) mode].insn_code;
3184  if (icode != CODE_FOR_nothing)
3185    {
3186      if (((pred = insn_data[(int) icode].operand[0].predicate)
3187	   && !((*pred) (x, mode))))
3188	x = force_reg (mode, x);
3189      emit_insn (GEN_FCN (icode) (x));
3190      return;
3191    }
3192  if (GET_MODE_SIZE (mode) == rounded_size)
3193    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3194  else
3195    {
3196#ifdef STACK_GROWS_DOWNWARD
3197      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3198				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3199#else
3200      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3201				GEN_INT (rounded_size));
3202#endif
3203      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3204    }
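
  /* For illustration: pushing an HImode value when PUSH_ROUNDING (2) == 4
     and the stack grows downward yields
	dest_addr == (pre_modify (reg sp) (plus (reg sp) (const_int -4)))
     so the two data bytes land at the new stack pointer and two bytes of
     padding sit above them within the rounded slot.  */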
3205
3206  dest = gen_rtx_MEM (mode, dest_addr);
3207
3208  if (type != 0)
3209    {
3210      set_mem_attributes (dest, type, 1);
3211
3212      if (flag_optimize_sibling_calls)
3213	/* Function incoming arguments may overlap with sibling call
3214	   outgoing arguments and we cannot allow reordering of reads
3215	   from function arguments with stores to outgoing arguments
3216	   of sibling calls.  */
3217	set_mem_alias_set (dest, 0);
3218    }
3219  emit_move_insn (dest, x);
3220}
3221#endif
3222
3223/* Generate code to push X onto the stack, assuming it has mode MODE and
3224   type TYPE.
3225   MODE is redundant except when X is a CONST_INT (since they don't
3226   carry mode info).
3227   SIZE is an rtx for the size of data to be copied (in bytes),
3228   needed only if X is BLKmode.
3229
3230   ALIGN (in bits) is maximum alignment we can assume.
3231
3232   If PARTIAL and REG are both nonzero, then copy that many of the first
3233   words of X into registers starting with REG, and push the rest of X.
3234   The amount of space pushed is decreased by PARTIAL words,
3235   rounded *down* to a multiple of PARM_BOUNDARY.
3236   REG must be a hard register in this case.
3237   If REG is zero but PARTIAL is not, take all other actions for an
3238   argument partially in registers, but do not actually load any
3239   registers.
3240
3241   EXTRA is the amount in bytes of extra space to leave next to this arg.
3242   This is ignored if an argument block has already been allocated.
3243
3244   On a machine that lacks real push insns, ARGS_ADDR is the address of
3245   the bottom of the argument block for this call.  We use indexing off there
3246   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3247   argument block has not been preallocated.
3248
3249   ARGS_SO_FAR is the size of args previously pushed for this call.
3250
3251   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3252   for arguments passed in registers.  If nonzero, it will be the number
3253   of bytes required.  */
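/* For illustration of the PARTIAL rounding above: with UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 64, PARTIAL == 3 means 12 bytes go in registers but
   the pushed size shrinks by only 8 bytes (12 rounded down to a multiple of
   8), so the remaining 4 bytes get stack space as well.  */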
3254
3255void
3256emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3257		args_addr, args_so_far, reg_parm_stack_space,
3258                alignment_pad)
3259     rtx x;
3260     enum machine_mode mode;
3261     tree type;
3262     rtx size;
3263     unsigned int align;
3264     int partial;
3265     rtx reg;
3266     int extra;
3267     rtx args_addr;
3268     rtx args_so_far;
3269     int reg_parm_stack_space;
3270     rtx alignment_pad;
3271{
3272  rtx xinner;
3273  enum direction stack_direction
3274#ifdef STACK_GROWS_DOWNWARD
3275    = downward;
3276#else
3277    = upward;
3278#endif
3279
3280  /* Decide where to pad the argument: `downward' for below,
3281     `upward' for above, or `none' for don't pad it.
3282     Default is below for small data on big-endian machines; else above.  */
3283  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3284
3285  /* Invert direction if stack is post-decrement.
3286     FIXME: why?  */
3287  if (STACK_PUSH_CODE == POST_DEC)
3288    if (where_pad != none)
3289      where_pad = (where_pad == downward ? upward : downward);
3290
3291  xinner = x = protect_from_queue (x, 0);
3292
3293  if (mode == BLKmode)
3294    {
3295      /* Copy a block into the stack, entirely or partially.  */
3296
3297      rtx temp;
3298      int used = partial * UNITS_PER_WORD;
3299      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3300      int skip;
3301
3302      if (size == 0)
3303	abort ();
3304
3305      used -= offset;
3306
3307      /* USED is now the # of bytes we need not copy to the stack
3308	 because registers will take care of them.  */
3309
3310      if (partial != 0)
3311	xinner = adjust_address (xinner, BLKmode, used);
3312
3313      /* If the partial register-part of the arg counts in its stack size,
3314	 skip the part of stack space corresponding to the registers.
3315	 Otherwise, start copying to the beginning of the stack space,
3316	 by setting SKIP to 0.  */
3317      skip = (reg_parm_stack_space == 0) ? 0 : used;
3318
3319#ifdef PUSH_ROUNDING
3320      /* Do it with several push insns if that doesn't take lots of insns
3321	 and if there is no difficulty with push insns that skip bytes
3322	 on the stack for alignment purposes.  */
3323      if (args_addr == 0
3324	  && PUSH_ARGS
3325	  && GET_CODE (size) == CONST_INT
3326	  && skip == 0
3327	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3328	  /* Here we avoid the case of a structure whose weak alignment
3329	     forces many pushes of a small amount of data,
3330	     and such small pushes do rounding that causes trouble.  */
3331	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3332	      || align >= BIGGEST_ALIGNMENT
3333	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3334		  == (align / BITS_PER_UNIT)))
3335	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3336	{
3337	  /* Push padding now if padding above and stack grows down,
3338	     or if padding below and stack grows up.
3339	     But if space already allocated, this has already been done.  */
3340	  if (extra && args_addr == 0
3341	      && where_pad != none && where_pad != stack_direction)
3342	    anti_adjust_stack (GEN_INT (extra));
3343
3344	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3345	}
3346      else
3347#endif /* PUSH_ROUNDING  */
3348	{
3349	  rtx target;
3350
3351	  /* Otherwise make space on the stack and copy the data
3352	     to the address of that space.  */
3353
3354	  /* Deduct words put into registers from the size we must copy.  */
3355	  if (partial != 0)
3356	    {
3357	      if (GET_CODE (size) == CONST_INT)
3358		size = GEN_INT (INTVAL (size) - used);
3359	      else
3360		size = expand_binop (GET_MODE (size), sub_optab, size,
3361				     GEN_INT (used), NULL_RTX, 0,
3362				     OPTAB_LIB_WIDEN);
3363	    }
3364
3365	  /* Get the address of the stack space.
3366	     In this case, we do not deal with EXTRA separately.
3367	     A single stack adjust will do.  */
3368	  if (! args_addr)
3369	    {
3370	      temp = push_block (size, extra, where_pad == downward);
3371	      extra = 0;
3372	    }
3373	  else if (GET_CODE (args_so_far) == CONST_INT)
3374	    temp = memory_address (BLKmode,
3375				   plus_constant (args_addr,
3376						  skip + INTVAL (args_so_far)));
3377	  else
3378	    temp = memory_address (BLKmode,
3379				   plus_constant (gen_rtx_PLUS (Pmode,
3380								args_addr,
3381								args_so_far),
3382						  skip));
3383	  target = gen_rtx_MEM (BLKmode, temp);
3384
3385	  if (type != 0)
3386	    {
3387	      set_mem_attributes (target, type, 1);
3388	      /* Function incoming arguments may overlap with sibling call
3389		 outgoing arguments and we cannot allow reordering of reads
3390		 from function arguments with stores to outgoing arguments
3391		 of sibling calls.  */
3392	      set_mem_alias_set (target, 0);
3393	    }
3394	  else
3395	    set_mem_align (target, align);
3396
3397	  /* TEMP is the address of the block.  Copy the data there.  */
3398	  if (GET_CODE (size) == CONST_INT
3399	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3400	    {
3401	      move_by_pieces (target, xinner, INTVAL (size), align);
3402	      goto ret;
3403	    }
3404	  else
3405	    {
3406	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3407	      enum machine_mode mode;
3408
3409	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3410		   mode != VOIDmode;
3411		   mode = GET_MODE_WIDER_MODE (mode))
3412		{
3413		  enum insn_code code = movstr_optab[(int) mode];
3414		  insn_operand_predicate_fn pred;
3415
3416		  if (code != CODE_FOR_nothing
3417		      && ((GET_CODE (size) == CONST_INT
3418			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3419			       <= (GET_MODE_MASK (mode) >> 1)))
3420			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3421		      && (!(pred = insn_data[(int) code].operand[0].predicate)
3422			  || ((*pred) (target, BLKmode)))
3423		      && (!(pred = insn_data[(int) code].operand[1].predicate)
3424			  || ((*pred) (xinner, BLKmode)))
3425		      && (!(pred = insn_data[(int) code].operand[3].predicate)
3426			  || ((*pred) (opalign, VOIDmode))))
3427		    {
3428		      rtx op2 = convert_to_mode (mode, size, 1);
3429		      rtx last = get_last_insn ();
3430		      rtx pat;
3431
3432		      pred = insn_data[(int) code].operand[2].predicate;
3433		      if (pred != 0 && ! (*pred) (op2, mode))
3434			op2 = copy_to_mode_reg (mode, op2);
3435
3436		      pat = GEN_FCN ((int) code) (target, xinner,
3437						  op2, opalign);
3438		      if (pat)
3439			{
3440			  emit_insn (pat);
3441			  goto ret;
3442			}
3443		      else
3444			delete_insns_since (last);
3445		    }
3446		}
3447	    }
3448
3449	  if (!ACCUMULATE_OUTGOING_ARGS)
3450	    {
3451	      /* If the source is referenced relative to the stack pointer,
3452		 copy it to another register to stabilize it.  We do not need
3453		 to do this if we know that we won't be changing sp.  */
3454
3455	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3456		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3457		temp = copy_to_reg (temp);
3458	    }
3459
3460	  /* Make inhibit_defer_pop nonzero around the library call
3461	     to force it to pop the bcopy-arguments right away.  */
3462	  NO_DEFER_POP;
3463#ifdef TARGET_MEM_FUNCTIONS
3464	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
3465			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3466			     convert_to_mode (TYPE_MODE (sizetype),
3467					      size, TREE_UNSIGNED (sizetype)),
3468			     TYPE_MODE (sizetype));
3469#else
3470	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
3471			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3472			     convert_to_mode (TYPE_MODE (integer_type_node),
3473					      size,
3474					      TREE_UNSIGNED (integer_type_node)),
3475			     TYPE_MODE (integer_type_node));
3476#endif
3477	  OK_DEFER_POP;
3478	}
3479    }
3480  else if (partial > 0)
3481    {
3482      /* Scalar partly in registers.  */
3483
3484      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3485      int i;
3486      int not_stack;
3487      /* # words of start of argument
3488	 that we must make space for but need not store.  */
3489      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3490      int args_offset = INTVAL (args_so_far);
3491      int skip;
3492
3493      /* Push padding now if padding above and stack grows down,
3494	 or if padding below and stack grows up.
3495	 But if space already allocated, this has already been done.  */
3496      if (extra && args_addr == 0
3497	  && where_pad != none && where_pad != stack_direction)
3498	anti_adjust_stack (GEN_INT (extra));
3499
3500      /* If we make space by pushing it, we might as well push
3501	 the real data.  Otherwise, we can leave OFFSET nonzero
3502	 and leave the space uninitialized.  */
3503      if (args_addr == 0)
3504	offset = 0;
3505
3506      /* Now NOT_STACK gets the number of words that we don't need to
3507	 allocate on the stack.  */
3508      not_stack = partial - offset;
3509
3510      /* If the partial register-part of the arg counts in its stack size,
3511	 skip the part of stack space corresponding to the registers.
3512	 Otherwise, start copying to the beginning of the stack space,
3513	 by setting SKIP to 0.  */
3514      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3515
3516      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3517	x = validize_mem (force_const_mem (mode, x));
3518
3519      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3520	 SUBREGs of such registers are not allowed.  */
3521      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3522	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3523	x = copy_to_reg (x);
3524
3525      /* Loop over all the words allocated on the stack for this arg.  */
3526      /* We can do it by words, because any scalar bigger than a word
3527	 has a size that is a multiple of a word.  */
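      /* Illustrative sketch with hypothetical numbers: for a 4-word scalar
	 (SIZE == 4) with PARTIAL == 2 words already in registers, OFFSET == 0
	 and REG_PARM_STACK_SPACE == 0 give NOT_STACK == 2 and SKIP == 0, so
	 the loop below pushes words 2 and 3 at ARGS_OFFSET and
	 ARGS_OFFSET + UNITS_PER_WORD respectively.  */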
3528#ifndef PUSH_ARGS_REVERSED
3529      for (i = not_stack; i < size; i++)
3530#else
3531      for (i = size - 1; i >= not_stack; i--)
3532#endif
3533	if (i >= not_stack + offset)
3534	  emit_push_insn (operand_subword_force (x, i, mode),
3535			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3536			  0, args_addr,
3537			  GEN_INT (args_offset + ((i - not_stack + skip)
3538						  * UNITS_PER_WORD)),
3539			  reg_parm_stack_space, alignment_pad);
3540    }
3541  else
3542    {
3543      rtx addr;
3544      rtx target = NULL_RTX;
3545      rtx dest;
3546
3547      /* Push padding now if padding above and stack grows down,
3548	 or if padding below and stack grows up.
3549	 But if space already allocated, this has already been done.  */
3550      if (extra && args_addr == 0
3551	  && where_pad != none && where_pad != stack_direction)
3552	anti_adjust_stack (GEN_INT (extra));
3553
3554#ifdef PUSH_ROUNDING
3555      if (args_addr == 0 && PUSH_ARGS)
3556	emit_single_push_insn (mode, x, type);
3557      else
3558#endif
3559	{
3560	  if (GET_CODE (args_so_far) == CONST_INT)
3561	    addr
3562	      = memory_address (mode,
3563				plus_constant (args_addr,
3564					       INTVAL (args_so_far)));
3565	  else
3566	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3567						       args_so_far));
3568	  target = addr;
3569	  dest = gen_rtx_MEM (mode, addr);
3570	  if (type != 0)
3571	    {
3572	      set_mem_attributes (dest, type, 1);
3573	      /* Function incoming arguments may overlap with sibling call
3574		 outgoing arguments and we cannot allow reordering of reads
3575		 from function arguments with stores to outgoing arguments
3576		 of sibling calls.  */
3577	      set_mem_alias_set (dest, 0);
3578	    }
3579
3580	  emit_move_insn (dest, x);
3581	}
3582
3583    }
3584
3585 ret:
3586  /* If part should go in registers, copy that part
3587     into the appropriate registers.  Do this now, at the end,
3588     since mem-to-mem copies above may do function calls.  */
3589  if (partial > 0 && reg != 0)
3590    {
3591      /* Handle calls that pass values in multiple non-contiguous locations.
3592	 The Irix 6 ABI has examples of this.  */
3593      if (GET_CODE (reg) == PARALLEL)
3594	emit_group_load (reg, x, -1);  /* ??? size? */
3595      else
3596	move_block_to_reg (REGNO (reg), x, partial, mode);
3597    }
3598
3599  if (extra && args_addr == 0 && where_pad == stack_direction)
3600    anti_adjust_stack (GEN_INT (extra));
3601
3602  if (alignment_pad && args_addr == 0)
3603    anti_adjust_stack (alignment_pad);
3604}
3605
3606/* Return X if X can be used as a subtarget in a sequence of arithmetic
3607   operations.  */
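/* For instance (illustrative), when expanding A = B + C + D the pseudo
   that already holds A may be returned here and used as the subtarget for
   the intermediate B + C, saving a temporary; a hard register, an
   RTX_UNCHANGING_P register, or anything inside a loop is rejected below
   and 0 is returned instead.  */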
3608
3609static rtx
3610get_subtarget (x)
3611     rtx x;
3612{
3613  return ((x == 0
3614	   /* Only registers can be subtargets.  */
3615	   || GET_CODE (x) != REG
3616	   /* If the register is readonly, it can't be set more than once.  */
3617	   || RTX_UNCHANGING_P (x)
3618	   /* Don't use hard regs to avoid extending their life.  */
3619	   || REGNO (x) < FIRST_PSEUDO_REGISTER
3620	   /* Avoid subtargets inside loops,
3621	      since they hide some invariant expressions.  */
3622	   || preserve_subexpressions_p ())
3623	  ? 0 : x);
3624}
3625
3626/* Expand an assignment that stores the value of FROM into TO.
3627   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3628   (This may contain a QUEUED rtx;
3629   if the value is constant, this rtx is a constant.)
3630   Otherwise, the returned value is NULL_RTX.
3631
3632   SUGGEST_REG is no longer actually used.
3633   It used to mean, copy the value through a register
3634   and return that register, if that is possible.
3635   We now use WANT_VALUE to decide whether to do this.  */
3636
3637rtx
3638expand_assignment (to, from, want_value, suggest_reg)
3639     tree to, from;
3640     int want_value;
3641     int suggest_reg ATTRIBUTE_UNUSED;
3642{
3643  rtx to_rtx = 0;
3644  rtx result;
3645
3646  /* Don't crash if the lhs of the assignment was erroneous.  */
3647
3648  if (TREE_CODE (to) == ERROR_MARK)
3649    {
3650      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3651      return want_value ? result : NULL_RTX;
3652    }
3653
3654  /* Assignment of a structure component needs special treatment
3655     if the structure component's rtx is not simply a MEM.
3656     Assignment of an array element at a constant index, and assignment of
3657     an array element in an unaligned packed structure field, has the same
3658     problem.  */
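  /* For example (illustrative): given
	 struct s { int f : 3; } *p;
	 p->f = v;
     the left-hand side is a COMPONENT_REF whose rtx is a bit-field rather
     than a plain MEM, so it goes through get_inner_reference and
     store_field below.  */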
3659
3660  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3661      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3662    {
3663      enum machine_mode mode1;
3664      HOST_WIDE_INT bitsize, bitpos;
3665      rtx orig_to_rtx;
3666      tree offset;
3667      int unsignedp;
3668      int volatilep = 0;
3669      tree tem;
3670
3671      push_temp_slots ();
3672      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3673				 &unsignedp, &volatilep);
3674
3675      /* If we are going to use store_bit_field and extract_bit_field,
3676	 make sure to_rtx will be safe for multiple use.  */
3677
3678      if (mode1 == VOIDmode && want_value)
3679	tem = stabilize_reference (tem);
3680
3681      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3682
3683      if (offset != 0)
3684	{
3685	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3686
3687	  if (GET_CODE (to_rtx) != MEM)
3688	    abort ();
3689
3690#ifdef POINTERS_EXTEND_UNSIGNED
3691	  if (GET_MODE (offset_rtx) != Pmode)
3692	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
3693#else
3694	  if (GET_MODE (offset_rtx) != ptr_mode)
3695	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3696#endif
3697
3698	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3699	     to call force_reg for that case.  Avoid that case.  */
3700	  if (GET_CODE (to_rtx) == MEM
3701	      && GET_MODE (to_rtx) == BLKmode
3702	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3703	      && bitsize > 0
3704	      && (bitpos % bitsize) == 0
3705	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3706	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3707	    {
3708	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3709	      bitpos = 0;
3710	    }
3711
3712	  to_rtx = offset_address (to_rtx, offset_rtx,
3713				   highest_pow2_factor_for_type (TREE_TYPE (to),
3714								 offset));
3715	}
3716
3717      if (GET_CODE (to_rtx) == MEM)
3718	{
3719	  tree old_expr = MEM_EXPR (to_rtx);
3720
3721	  /* If the field is at offset zero, we could have been given the
3722	     DECL_RTX of the parent struct.  Don't munge it.  */
3723	  to_rtx = shallow_copy_rtx (to_rtx);
3724
3725	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3726	}
3727
3728      /* Deal with volatile and readonly fields.  The former is only done
3729	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
3730      if (volatilep && GET_CODE (to_rtx) == MEM)
3731	{
3732	  if (to_rtx == orig_to_rtx)
3733	    to_rtx = copy_rtx (to_rtx);
3734	  MEM_VOLATILE_P (to_rtx) = 1;
3735	}
3736
3737      if (TREE_CODE (to) == COMPONENT_REF
3738	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3739	{
3740	  if (to_rtx == orig_to_rtx)
3741	    to_rtx = copy_rtx (to_rtx);
3742	  RTX_UNCHANGING_P (to_rtx) = 1;
3743	}
3744
3745      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3746	{
3747	  if (to_rtx == orig_to_rtx)
3748	    to_rtx = copy_rtx (to_rtx);
3749	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3750	}
3751
3752      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3753			    (want_value
3754			     /* Spurious cast for HPUX compiler.  */
3755			     ? ((enum machine_mode)
3756				TYPE_MODE (TREE_TYPE (to)))
3757			     : VOIDmode),
3758			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3759
3760      preserve_temp_slots (result);
3761      free_temp_slots ();
3762      pop_temp_slots ();
3763
3764      /* If the value is meaningful, convert RESULT to the proper mode.
3765	 Otherwise, return nothing.  */
3766      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3767					  TYPE_MODE (TREE_TYPE (from)),
3768					  result,
3769					  TREE_UNSIGNED (TREE_TYPE (to)))
3770	      : NULL_RTX);
3771    }
3772
3773  /* If the rhs is a function call and its value is not an aggregate,
3774     call the function before we start to compute the lhs.
3775     This is needed for correct code for cases such as
3776     val = setjmp (buf) on machines where reference to val
3777     requires loading up part of an address in a separate insn.
3778
3779     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3780     since it might be a promoted variable where the zero- or sign- extension
3781     needs to be done.  Handling this in the normal way is safe because no
3782     computation is done before the call.  */
3783  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3784      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3785      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3786	    && GET_CODE (DECL_RTL (to)) == REG))
3787    {
3788      rtx value;
3789
3790      push_temp_slots ();
3791      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3792      if (to_rtx == 0)
3793	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3794
3795      /* Handle calls that return values in multiple non-contiguous locations.
3796	 The Irix 6 ABI has examples of this.  */
3797      if (GET_CODE (to_rtx) == PARALLEL)
3798	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3799      else if (GET_MODE (to_rtx) == BLKmode)
3800	emit_block_move (to_rtx, value, expr_size (from));
3801      else
3802	{
3803#ifdef POINTERS_EXTEND_UNSIGNED
3804	  if (POINTER_TYPE_P (TREE_TYPE (to))
3805	      && GET_MODE (to_rtx) != GET_MODE (value))
3806	    value = convert_memory_address (GET_MODE (to_rtx), value);
3807#endif
3808	  emit_move_insn (to_rtx, value);
3809	}
3810      preserve_temp_slots (to_rtx);
3811      free_temp_slots ();
3812      pop_temp_slots ();
3813      return want_value ? to_rtx : NULL_RTX;
3814    }
3815
3816  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3817     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3818
3819  if (to_rtx == 0)
3820    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3821
3822  /* Don't move directly into a return register.  */
3823  if (TREE_CODE (to) == RESULT_DECL
3824      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3825    {
3826      rtx temp;
3827
3828      push_temp_slots ();
3829      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3830
3831      if (GET_CODE (to_rtx) == PARALLEL)
3832	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3833      else
3834	emit_move_insn (to_rtx, temp);
3835
3836      preserve_temp_slots (to_rtx);
3837      free_temp_slots ();
3838      pop_temp_slots ();
3839      return want_value ? to_rtx : NULL_RTX;
3840    }
3841
3842  /* In case we are returning the contents of an object which overlaps
3843     the place the value is being stored, use a safe function when copying
3844     a value through a pointer into a structure value return block.  */
3845  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3846      && current_function_returns_struct
3847      && !current_function_returns_pcc_struct)
3848    {
3849      rtx from_rtx, size;
3850
3851      push_temp_slots ();
3852      size = expr_size (from);
3853      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3854
3855#ifdef TARGET_MEM_FUNCTIONS
3856      emit_library_call (memmove_libfunc, LCT_NORMAL,
3857			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3858			 XEXP (from_rtx, 0), Pmode,
3859			 convert_to_mode (TYPE_MODE (sizetype),
3860					  size, TREE_UNSIGNED (sizetype)),
3861			 TYPE_MODE (sizetype));
3862#else
3863      emit_library_call (bcopy_libfunc, LCT_NORMAL,
3864			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3865			 XEXP (to_rtx, 0), Pmode,
3866			 convert_to_mode (TYPE_MODE (integer_type_node),
3867					  size, TREE_UNSIGNED (integer_type_node)),
3868			 TYPE_MODE (integer_type_node));
3869#endif
3870
3871      preserve_temp_slots (to_rtx);
3872      free_temp_slots ();
3873      pop_temp_slots ();
3874      return want_value ? to_rtx : NULL_RTX;
3875    }
3876
3877  /* Compute FROM and store the value in the rtx we got.  */
3878
3879  push_temp_slots ();
3880  result = store_expr (from, to_rtx, want_value);
3881  preserve_temp_slots (result);
3882  free_temp_slots ();
3883  pop_temp_slots ();
3884  return want_value ? result : NULL_RTX;
3885}
3886
3887/* Generate code for computing expression EXP,
3888   and storing the value into TARGET.
3889   TARGET may contain a QUEUED rtx.
3890
3891   If WANT_VALUE is nonzero, return a copy of the value
3892   not in TARGET, so that we can be sure to use the proper
3893   value in a containing expression even if TARGET has something
3894   else stored in it.  If possible, we copy the value through a pseudo
3895   and return that pseudo.  Or, if the value is constant, we try to
3896   return the constant.  In some cases, we return a pseudo
3897   copied *from* TARGET.
3898
3899   If the mode is BLKmode then we may return TARGET itself.
3900   It turns out that in BLKmode it doesn't cause a problem,
3901   because C has no operators that could combine two different
3902   assignments into the same BLKmode object with different values
3903   with no sequence point.  Will other languages need this to
3904   be more thorough?
3905
3906   If WANT_VALUE is 0, we return NULL, to make sure
3907   to catch quickly any cases where the caller uses the value
3908   and fails to set WANT_VALUE.  */
3909
3910rtx
3911store_expr (exp, target, want_value)
3912     tree exp;
3913     rtx target;
3914     int want_value;
3915{
3916  rtx temp;
3917  int dont_return_target = 0;
3918  int dont_store_target = 0;
3919
3920  if (TREE_CODE (exp) == COMPOUND_EXPR)
3921    {
3922      /* Perform first part of compound expression, then assign from second
3923	 part.  */
3924      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3925      emit_queue ();
3926      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3927    }
3928  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3929    {
3930      /* For conditional expression, get safe form of the target.  Then
3931	 test the condition, doing the appropriate assignment on either
3932	 side.  This avoids the creation of unnecessary temporaries.
3933	 For non-BLKmode, it is more efficient not to do this.  */
3934
3935      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3936
3937      emit_queue ();
3938      target = protect_from_queue (target, 1);
3939
3940      do_pending_stack_adjust ();
3941      NO_DEFER_POP;
3942      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3943      start_cleanup_deferral ();
3944      store_expr (TREE_OPERAND (exp, 1), target, 0);
3945      end_cleanup_deferral ();
3946      emit_queue ();
3947      emit_jump_insn (gen_jump (lab2));
3948      emit_barrier ();
3949      emit_label (lab1);
3950      start_cleanup_deferral ();
3951      store_expr (TREE_OPERAND (exp, 2), target, 0);
3952      end_cleanup_deferral ();
3953      emit_queue ();
3954      emit_label (lab2);
3955      OK_DEFER_POP;
3956
3957      return want_value ? target : NULL_RTX;
3958    }
3959  else if (queued_subexp_p (target))
3960    /* If target contains a postincrement, let's not risk
3961       using it as the place to generate the rhs.  */
3962    {
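      /* This can happen, for instance (illustrative), when expanding an
	 assignment such as *p++ = expr, where the queued post-increment of
	 P is part of TARGET's address.  */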
3963      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3964	{
3965	  /* Expand EXP into a new pseudo.  */
3966	  temp = gen_reg_rtx (GET_MODE (target));
3967	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3968	}
3969      else
3970	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3971
3972      /* If target is volatile, ANSI requires accessing the value
3973	 *from* the target, if it is accessed.  So make that happen.
3974	 In no case return the target itself.  */
3975      if (! MEM_VOLATILE_P (target) && want_value)
3976	dont_return_target = 1;
3977    }
3978  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3979	   && GET_MODE (target) != BLKmode)
3980    /* If target is in memory and caller wants value in a register instead,
3981       arrange that.  Pass TARGET as target for expand_expr so that,
3982       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3983       We know expand_expr will not use the target in that case.
3984       Don't do this if TARGET is volatile because we are supposed
3985       to write it and then read it.  */
3986    {
3987      temp = expand_expr (exp, target, GET_MODE (target), 0);
3988      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3989	{
3990	  /* If TEMP is already in the desired TARGET, only copy it from
3991	     memory and don't store it there again.  */
3992	  if (temp == target
3993	      || (rtx_equal_p (temp, target)
3994		  && ! side_effects_p (temp) && ! side_effects_p (target)))
3995	    dont_store_target = 1;
3996	  temp = copy_to_reg (temp);
3997	}
3998      dont_return_target = 1;
3999    }
4000  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4001    /* If this is a scalar in a register that is stored in a wider mode
4002       than the declared mode, compute the result into its declared mode
4003       and then convert to the wider mode.  Our value is the computed
4004       expression.  */
4005    {
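      /* E.g. (illustrative): on a target whose PROMOTE_MODE widens HImode
	 objects to SImode, a declared short variable may live in
	 (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set; the value
	 is computed in the narrow mode and then extended into the wide
	 register below.  */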
4006      rtx inner_target = 0;
4007
4008      /* If we don't want a value, we can do the conversion inside EXP,
4009	 which will often result in some optimizations.  Do the conversion
4010	 in two steps: first change the signedness, if needed, then
4011	 the extend.  But don't do this if the type of EXP is a subtype
4012	 of something else since then the conversion might involve
4013	 more than just converting modes.  */
4014      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4015	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
4016	{
4017	  if (TREE_UNSIGNED (TREE_TYPE (exp))
4018	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4019	    exp
4020	      = convert
4021		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4022					  TREE_TYPE (exp)),
4023		 exp);
4024
4025	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4026					SUBREG_PROMOTED_UNSIGNED_P (target)),
4027			 exp);
4028
4029	  inner_target = SUBREG_REG (target);
4030	}
4031
4032      temp = expand_expr (exp, inner_target, VOIDmode, 0);
4033
4034      /* If TEMP is a volatile MEM and we want a result value, make
4035	 the access now so it gets done only once.  Likewise if
4036	 it contains TARGET.  */
4037      if (GET_CODE (temp) == MEM && want_value
4038	  && (MEM_VOLATILE_P (temp)
4039	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4040	temp = copy_to_reg (temp);
4041
4042      /* If TEMP is a VOIDmode constant, use convert_modes to make
4043	 sure that we properly convert it.  */
4044      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4045	{
4046	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4047				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4048	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4049			        GET_MODE (target), temp,
4050			        SUBREG_PROMOTED_UNSIGNED_P (target));
4051	}
4052
4053      convert_move (SUBREG_REG (target), temp,
4054		    SUBREG_PROMOTED_UNSIGNED_P (target));
4055
4056      /* If we promoted a constant, change the mode back down to match
4057	 target.  Otherwise, the caller might get confused by a result whose
4058	 mode is larger than expected.  */
4059
4060      if (want_value && GET_MODE (temp) != GET_MODE (target))
4061	{
4062	  if (GET_MODE (temp) != VOIDmode)
4063	    {
4064	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4065	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4066	      SUBREG_PROMOTED_UNSIGNED_P (temp)
4067		= SUBREG_PROMOTED_UNSIGNED_P (target);
4068	    }
4069	  else
4070	    temp = convert_modes (GET_MODE (target),
4071				  GET_MODE (SUBREG_REG (target)),
4072				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4073	}
4074
4075      return want_value ? temp : NULL_RTX;
4076    }
4077  else
4078    {
4079      temp = expand_expr (exp, target, GET_MODE (target), 0);
4080      /* Return TARGET if it's a specified hardware register.
4081	 If TARGET is a volatile mem ref, either return TARGET
4082	 or return a reg copied *from* TARGET; ANSI requires this.
4083
4084	 Otherwise, if TEMP is not TARGET, return TEMP
4085	 if it is constant (for efficiency),
4086	 or if we really want the correct value.  */
4087      if (!(target && GET_CODE (target) == REG
4088	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
4089	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4090	  && ! rtx_equal_p (temp, target)
4091	  && (CONSTANT_P (temp) || want_value))
4092	dont_return_target = 1;
4093    }
4094
4095  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4096     the same as that of TARGET, adjust the constant.  This is needed, for
4097     example, in case it is a CONST_DOUBLE and we want only a word-sized
4098     value.  */
4099  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4100      && TREE_CODE (exp) != ERROR_MARK
4101      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4102    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4103			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4104
4105  /* If value was not generated in the target, store it there.
4106     Convert the value to TARGET's type first if necessary.
4107     If TEMP and TARGET compare equal according to rtx_equal_p, but
4108     one or both of them are volatile memory refs, we have to distinguish
4109     two cases:
4110     - expand_expr has used TARGET.  In this case, we must not generate
4111       another copy.  This can be detected by TARGET being equal according
4112       to == .
4113     - expand_expr has not used TARGET - that means that the source just
4114       happens to have the same RTX form.  Since temp will have been created
4115       by expand_expr, it will compare unequal according to == .
4116       We must generate a copy in this case, to reach the correct number
4117       of volatile memory references.  */
4118
4119  if ((! rtx_equal_p (temp, target)
4120       || (temp != target && (side_effects_p (temp)
4121			      || side_effects_p (target))))
4122      && TREE_CODE (exp) != ERROR_MARK
4123      && ! dont_store_target
4124	 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4125	    but TARGET is not a valid memory reference, TEMP will differ
4126	    from TARGET although it is really the same location.  */
4127      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4128	  || target != DECL_RTL_IF_SET (exp)))
4129    {
4130      target = protect_from_queue (target, 1);
4131      if (GET_MODE (temp) != GET_MODE (target)
4132	  && GET_MODE (temp) != VOIDmode)
4133	{
4134	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4135	  if (dont_return_target)
4136	    {
4137	      /* In this case, we will return TEMP,
4138		 so make sure it has the proper mode.
4139		 But don't forget to store the value into TARGET.  */
4140	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4141	      emit_move_insn (target, temp);
4142	    }
4143	  else
4144	    convert_move (target, temp, unsignedp);
4145	}
4146
4147      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4148	{
4149	  /* Handle copying a string constant into an array.  The string
4150	     constant may be shorter than the array.  So copy just the string's
4151	     actual length, and clear the rest.  First get the size of the data
4152	     type of the string, which is actually the size of the target.  */
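	  /* For instance (illustrative), for
		 char buf[8] = "abc";
	     EXP is the STRING_CST "abc" (TREE_STRING_LENGTH == 4, counting
	     the terminating nul) and SIZE is 8, so 4 bytes are copied and
	     the remaining 4 are cleared below.  */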
4153	  rtx size = expr_size (exp);
4154
4155	  if (GET_CODE (size) == CONST_INT
4156	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4157	    emit_block_move (target, temp, size);
4158	  else
4159	    {
4160	      /* Compute the size of the data to copy from the string.  */
4161	      tree copy_size
4162		= size_binop (MIN_EXPR,
4163			      make_tree (sizetype, size),
4164			      size_int (TREE_STRING_LENGTH (exp)));
4165	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4166					       VOIDmode, 0);
4167	      rtx label = 0;
4168
4169	      /* Copy that much.  */
4170	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4171	      emit_block_move (target, temp, copy_size_rtx);
4172
4173	      /* Figure out how much is left in TARGET that we have to clear.
4174		 Do all calculations in ptr_mode.  */
4175	      if (GET_CODE (copy_size_rtx) == CONST_INT)
4176		{
4177		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4178		  target = adjust_address (target, BLKmode,
4179					   INTVAL (copy_size_rtx));
4180		}
4181	      else
4182		{
4183		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4184				       copy_size_rtx, NULL_RTX, 0,
4185				       OPTAB_LIB_WIDEN);
4186
4187#ifdef POINTERS_EXTEND_UNSIGNED
4188		  if (GET_MODE (copy_size_rtx) != Pmode)
4189		    copy_size_rtx = convert_memory_address (Pmode,
4190							    copy_size_rtx);
4191#endif
4192
4193		  target = offset_address (target, copy_size_rtx,
4194					   highest_pow2_factor (copy_size));
4195		  label = gen_label_rtx ();
4196		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4197					   GET_MODE (size), 0, label);
4198		}
4199
4200	      if (size != const0_rtx)
4201		clear_storage (target, size);
4202
4203	      if (label)
4204		emit_label (label);
4205	    }
4206	}
4207      /* Handle calls that return values in multiple non-contiguous locations.
4208	 The Irix 6 ABI has examples of this.  */
4209      else if (GET_CODE (target) == PARALLEL)
4210	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4211      else if (GET_MODE (temp) == BLKmode)
4212	emit_block_move (target, temp, expr_size (exp));
4213      else
4214	emit_move_insn (target, temp);
4215    }
4216
4217  /* If we don't want a value, return NULL_RTX.  */
4218  if (! want_value)
4219    return NULL_RTX;
4220
4221  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4222     ??? The latter test doesn't seem to make sense.  */
4223  else if (dont_return_target && GET_CODE (temp) != MEM)
4224    return temp;
4225
4226  /* Return TARGET itself if it is a hard register.  */
4227  else if (want_value && GET_MODE (target) != BLKmode
4228	   && ! (GET_CODE (target) == REG
4229		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4230    return copy_to_reg (target);
4231
4232  else
4233    return target;
4234}
4235
4236/* Return 1 if EXP just contains zeros.  */
4237
4238static int
4239is_zeros_p (exp)
4240     tree exp;
4241{
4242  tree elt;
4243
4244  switch (TREE_CODE (exp))
4245    {
4246    case CONVERT_EXPR:
4247    case NOP_EXPR:
4248    case NON_LVALUE_EXPR:
4249    case VIEW_CONVERT_EXPR:
4250      return is_zeros_p (TREE_OPERAND (exp, 0));
4251
4252    case INTEGER_CST:
4253      return integer_zerop (exp);
4254
4255    case COMPLEX_CST:
4256      return
4257	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4258
4259    case REAL_CST:
4260      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4261
4262    case VECTOR_CST:
4263      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4264	   elt = TREE_CHAIN (elt))
4265	if (!is_zeros_p (TREE_VALUE (elt)))
4266	  return 0;
4267
4268      return 1;
4269
4270    case CONSTRUCTOR:
4271      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4272	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4273      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4274	if (! is_zeros_p (TREE_VALUE (elt)))
4275	  return 0;
4276
4277      return 1;
4278
4279    default:
4280      return 0;
4281    }
4282}
4283
4284/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
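/* For example (illustrative), the constructor { 0, 0, 0, 5 } has three
   zero elements out of the four listed, so 4 * zeros >= 3 * elts holds and
   we return 1; { 1, 2 } yields 0, since elements missing from the list are
   not counted here.  */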
4285
4286static int
4287mostly_zeros_p (exp)
4288     tree exp;
4289{
4290  if (TREE_CODE (exp) == CONSTRUCTOR)
4291    {
4292      int elts = 0, zeros = 0;
4293      tree elt = CONSTRUCTOR_ELTS (exp);
4294      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4295	{
4296	  /* If there are no ranges of true bits, it is all zero.  */
4297	  return elt == NULL_TREE;
4298	}
4299      for (; elt; elt = TREE_CHAIN (elt))
4300	{
4301	  /* We do not handle the case where the index is a RANGE_EXPR,
4302	     so the statistic will be somewhat inaccurate.
4303	     We do make a more accurate count in store_constructor itself,
4304	     and since this function is only used for nested array elements,
4305	     this should be close enough.  */
4306	  if (mostly_zeros_p (TREE_VALUE (elt)))
4307	    zeros++;
4308	  elts++;
4309	}
4310
4311      return 4 * zeros >= 3 * elts;
4312    }
4313
4314  return is_zeros_p (exp);
4315}
4316
4317/* Helper function for store_constructor.
4318   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4319   TYPE is the type of the CONSTRUCTOR, not the element type.
4320   CLEARED is as for store_constructor.
4321   ALIAS_SET is the alias set to use for any stores.
4322
4323   This provides a recursive shortcut back to store_constructor when it isn't
4324   necessary to go through store_field.  This is so that we can pass through
4325   the cleared field to let store_constructor know that we may not have to
4326   clear a substructure if the outer structure has already been cleared.  */
4327
4328static void
4329store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4330			 alias_set)
4331     rtx target;
4332     unsigned HOST_WIDE_INT bitsize;
4333     HOST_WIDE_INT bitpos;
4334     enum machine_mode mode;
4335     tree exp, type;
4336     int cleared;
4337     int alias_set;
4338{
4339  if (TREE_CODE (exp) == CONSTRUCTOR
4340      && bitpos % BITS_PER_UNIT == 0
4341      /* If we have a non-zero bitpos for a register target, then we just
4342	 let store_field do the bitfield handling.  This is unlikely to
4343	 generate unnecessary clear instructions anyways.  */
4344      && (bitpos == 0 || GET_CODE (target) == MEM))
4345    {
4346      if (GET_CODE (target) == MEM)
4347	target
4348	  = adjust_address (target,
4349			    GET_MODE (target) == BLKmode
4350			    || 0 != (bitpos
4351				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4352			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4353
4354
4355      /* Update the alias set, if required.  */
4356      if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4357	  && MEM_ALIAS_SET (target) != 0)
4358	{
4359	  target = copy_rtx (target);
4360	  set_mem_alias_set (target, alias_set);
4361	}
4362
4363      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4364    }
4365  else
4366    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4367		 alias_set);
4368}
4369
4370/* Store the value of constructor EXP into the rtx TARGET.
4371   TARGET is either a REG or a MEM; we know it cannot conflict, since
4372   safe_from_p has been called.
4373   CLEARED is true if TARGET is known to have been zero'd.
4374   SIZE is the number of bytes of TARGET we are allowed to modify: this
4375   may not be the same as the size of EXP if we are assigning to a field
4376   which has been packed to exclude padding bits.  */
4377
4378static void
4379store_constructor (exp, target, cleared, size)
4380     tree exp;
4381     rtx target;
4382     int cleared;
4383     HOST_WIDE_INT size;
4384{
4385  tree type = TREE_TYPE (exp);
4386#ifdef WORD_REGISTER_OPERATIONS
4387  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4388#endif
4389
4390  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4391      || TREE_CODE (type) == QUAL_UNION_TYPE)
4392    {
4393      tree elt;
4394
4395      /* We either clear the aggregate or indicate the value is dead.  */
4396      if ((TREE_CODE (type) == UNION_TYPE
4397	   || TREE_CODE (type) == QUAL_UNION_TYPE)
4398	  && ! cleared
4399	  && ! CONSTRUCTOR_ELTS (exp))
4400	/* If the constructor is empty, clear the union.  */
4401	{
4402	  clear_storage (target, expr_size (exp));
4403	  cleared = 1;
4404	}
4405
4406      /* If we are building a static constructor into a register,
4407	 set the initial value as zero so we can fold the value into
4408	 a constant.  But if more than one register is involved,
4409	 this probably loses.  */
4410      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4411	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4412	{
4413	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4414	  cleared = 1;
4415	}
4416
4417      /* If the constructor has fewer fields than the structure
4418	 or if we are initializing the structure to mostly zeros,
4419	 clear the whole structure first.  Don't do this if TARGET is a
4420	 register whose mode size isn't equal to SIZE since clear_storage
4421	 can't handle this case.  */
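      /* E.g. (illustrative):
	     struct s { int a, b, c, d; } x = { 1 };
	 lists only one of four fields, so the whole object is cleared
	 first and only A is stored explicitly below.  */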
4422      else if (! cleared && size > 0
4423	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4424		    != fields_length (type))
4425		   || mostly_zeros_p (exp))
4426	       && (GET_CODE (target) != REG
4427		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4428		       == size)))
4429	{
4430	  clear_storage (target, GEN_INT (size));
4431	  cleared = 1;
4432	}
4433
4434      if (! cleared)
4435	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4436
4437      /* Store each element of the constructor into
4438	 the corresponding field of TARGET.  */
4439
4440      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4441	{
4442	  tree field = TREE_PURPOSE (elt);
4443	  tree value = TREE_VALUE (elt);
4444	  enum machine_mode mode;
4445	  HOST_WIDE_INT bitsize;
4446	  HOST_WIDE_INT bitpos = 0;
4447	  int unsignedp;
4448	  tree offset;
4449	  rtx to_rtx = target;
4450
4451	  /* Just ignore missing fields.
4452	     We cleared the whole structure, above,
4453	     if any fields are missing.  */
4454	  if (field == 0)
4455	    continue;
4456
4457	  if (cleared && is_zeros_p (value))
4458	    continue;
4459
4460	  if (host_integerp (DECL_SIZE (field), 1))
4461	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
4462	  else
4463	    bitsize = -1;
4464
4465	  unsignedp = TREE_UNSIGNED (field);
4466	  mode = DECL_MODE (field);
4467	  if (DECL_BIT_FIELD (field))
4468	    mode = VOIDmode;
4469
4470	  offset = DECL_FIELD_OFFSET (field);
4471	  if (host_integerp (offset, 0)
4472	      && host_integerp (bit_position (field), 0))
4473	    {
4474	      bitpos = int_bit_position (field);
4475	      offset = 0;
4476	    }
4477	  else
4478	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4479
4480	  if (offset)
4481	    {
4482	      rtx offset_rtx;
4483
4484	      if (contains_placeholder_p (offset))
4485		offset = build (WITH_RECORD_EXPR, sizetype,
4486				offset, make_tree (TREE_TYPE (exp), target));
4487
4488	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4489	      if (GET_CODE (to_rtx) != MEM)
4490		abort ();
4491
4492#ifdef POINTERS_EXTEND_UNSIGNED
4493	      if (GET_MODE (offset_rtx) != Pmode)
4494		offset_rtx = convert_memory_address (Pmode, offset_rtx);
4495#else
4496	      if (GET_MODE (offset_rtx) != ptr_mode)
4497		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4498#endif
4499
4500	      to_rtx = offset_address (to_rtx, offset_rtx,
4501				       highest_pow2_factor (offset));
4502	    }
4503
4504	  if (TREE_READONLY (field))
4505	    {
4506	      if (GET_CODE (to_rtx) == MEM)
4507		to_rtx = copy_rtx (to_rtx);
4508
4509	      RTX_UNCHANGING_P (to_rtx) = 1;
4510	    }
4511
4512#ifdef WORD_REGISTER_OPERATIONS
4513	  /* If this initializes a field that is smaller than a word, at the
4514	     start of a word, try to widen it to a full word.
4515	     This special case allows us to output C++ member function
4516	     initializations in a form that the optimizers can understand.  */
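	  /* Illustrative example (hypothetical layout, 32-bit word): for
		 struct s { short tag; short pad; } v = { 1, 0 };
	     in a register, the constant 1 for TAG is widened to a full
	     word (and shifted on big-endian targets) so a plain word move
	     replaces a sub-word bit-field insertion.  */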
4517	  if (GET_CODE (target) == REG
4518	      && bitsize < BITS_PER_WORD
4519	      && bitpos % BITS_PER_WORD == 0
4520	      && GET_MODE_CLASS (mode) == MODE_INT
4521	      && TREE_CODE (value) == INTEGER_CST
4522	      && exp_size >= 0
4523	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4524	    {
4525	      tree type = TREE_TYPE (value);
4526
4527	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4528		{
4529		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4530		  value = convert (type, value);
4531		}
4532
4533	      if (BYTES_BIG_ENDIAN)
4534		value
4535		  = fold (build (LSHIFT_EXPR, type, value,
4536				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4537	      bitsize = BITS_PER_WORD;
4538	      mode = word_mode;
4539	    }
4540#endif
4541
4542	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4543	      && DECL_NONADDRESSABLE_P (field))
4544	    {
4545	      to_rtx = copy_rtx (to_rtx);
4546	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4547	    }
4548
4549	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
4550				   value, type, cleared,
4551				   get_alias_set (TREE_TYPE (field)));
4552	}
4553    }
4554  else if (TREE_CODE (type) == ARRAY_TYPE
4555	   || TREE_CODE (type) == VECTOR_TYPE)
4556    {
4557      tree elt;
4558      int i;
4559      int need_to_clear;
4560      tree domain = TYPE_DOMAIN (type);
4561      tree elttype = TREE_TYPE (type);
4562      int const_bounds_p;
4563      HOST_WIDE_INT minelt = 0;
4564      HOST_WIDE_INT maxelt = 0;
4565
4566      /* Vectors are like arrays, but the domain is stored via an array
4567	 type indirectly.  */
4568      if (TREE_CODE (type) == VECTOR_TYPE)
4569	{
4570	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4571	     the same field as TYPE_DOMAIN, we are not guaranteed that
4572	     it always will.  */
4573	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4574	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4575	}
4576
4577      const_bounds_p = (TYPE_MIN_VALUE (domain)
4578			&& TYPE_MAX_VALUE (domain)
4579			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
4580			&& host_integerp (TYPE_MAX_VALUE (domain), 0));
4581
4582      /* If we have constant bounds for the range of the type, get them.  */
4583      if (const_bounds_p)
4584	{
4585	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4586	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4587	}
4588
4589      /* If the constructor has fewer elements than the array,
4590	         clear the whole array first.  Similarly if this is a
4591	         static constructor of a non-BLKmode object.  */
4592      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4593	need_to_clear = 1;
4594      else
4595	{
4596	  HOST_WIDE_INT count = 0, zero_count = 0;
4597	  need_to_clear = ! const_bounds_p;
4598
4599	  /* This loop is a more accurate version of the loop in
4600	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4601	     It is also needed to check for missing elements.  */
4602	  for (elt = CONSTRUCTOR_ELTS (exp);
4603	       elt != NULL_TREE && ! need_to_clear;
4604	       elt = TREE_CHAIN (elt))
4605	    {
4606	      tree index = TREE_PURPOSE (elt);
4607	      HOST_WIDE_INT this_node_count;
4608
4609	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4610		{
4611		  tree lo_index = TREE_OPERAND (index, 0);
4612		  tree hi_index = TREE_OPERAND (index, 1);
4613
4614		  if (! host_integerp (lo_index, 1)
4615		      || ! host_integerp (hi_index, 1))
4616		    {
4617		      need_to_clear = 1;
4618		      break;
4619		    }
4620
4621		  this_node_count = (tree_low_cst (hi_index, 1)
4622				     - tree_low_cst (lo_index, 1) + 1);
4623		}
4624	      else
4625		this_node_count = 1;
4626
4627	      count += this_node_count;
4628	      if (mostly_zeros_p (TREE_VALUE (elt)))
4629		zero_count += this_node_count;
4630	    }
4631
4632	  /* Clear the entire array first if there are any missing elements,
4633	     or if the incidence of zero elements is >= 75%.  */
4634	  if (! need_to_clear
4635	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4636	    need_to_clear = 1;
4637	}
4638
4639      if (need_to_clear && size > 0)
4640	{
4641	  if (! cleared)
4642	    {
4643	      if (REG_P (target))
4644		emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
4645	      else
4646		clear_storage (target, GEN_INT (size));
4647	    }
4648	  cleared = 1;
4649	}
4650      else if (REG_P (target))
4651	/* Inform later passes that the old value is dead.  */
4652	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4653
4654      /* Store each element of the constructor into
4655	 the corresponding element of TARGET, determined
4656	 by counting the elements.  */
4657      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4658	   elt;
4659	   elt = TREE_CHAIN (elt), i++)
4660	{
4661	  enum machine_mode mode;
4662	  HOST_WIDE_INT bitsize;
4663	  HOST_WIDE_INT bitpos;
4664	  int unsignedp;
4665	  tree value = TREE_VALUE (elt);
4666	  tree index = TREE_PURPOSE (elt);
4667	  rtx xtarget = target;
4668
4669	  if (cleared && is_zeros_p (value))
4670	    continue;
4671
4672	  unsignedp = TREE_UNSIGNED (elttype);
4673	  mode = TYPE_MODE (elttype);
4674	  if (mode == BLKmode)
4675	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4676		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
4677		       : -1);
4678	  else
4679	    bitsize = GET_MODE_BITSIZE (mode);
4680
4681	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4682	    {
4683	      tree lo_index = TREE_OPERAND (index, 0);
4684	      tree hi_index = TREE_OPERAND (index, 1);
4685	      rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4686	      struct nesting *loop;
4687	      HOST_WIDE_INT lo, hi, count;
4688	      tree position;
4689
4690	      /* If the range is constant and "small", unroll the loop.  */
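	      /* A RANGE_EXPR index comes from a designated initializer such
		 as (GNU C, illustrative)
		     int a[16] = { [4 ... 11] = 7 };
		 With constant bounds and a small total size the stores are
		 emitted inline here; otherwise a runtime loop over the
		 range is generated in the else branch.  */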
4691	      if (const_bounds_p
4692		  && host_integerp (lo_index, 0)
4693		  && host_integerp (hi_index, 0)
4694		  && (lo = tree_low_cst (lo_index, 0),
4695		      hi = tree_low_cst (hi_index, 0),
4696		      count = hi - lo + 1,
4697		      (GET_CODE (target) != MEM
4698		       || count <= 2
4699		       || (host_integerp (TYPE_SIZE (elttype), 1)
4700			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4701			       <= 40 * 8)))))
4702		{
4703		  lo -= minelt;  hi -= minelt;
4704		  for (; lo <= hi; lo++)
4705		    {
4706		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4707
4708		      if (GET_CODE (target) == MEM
4709			  && !MEM_KEEP_ALIAS_SET_P (target)
4710			  && TREE_CODE (type) == ARRAY_TYPE
4711			  && TYPE_NONALIASED_COMPONENT (type))
4712			{
4713			  target = copy_rtx (target);
4714			  MEM_KEEP_ALIAS_SET_P (target) = 1;
4715			}
4716
4717		      store_constructor_field
4718			(target, bitsize, bitpos, mode, value, type, cleared,
4719			 get_alias_set (elttype));
4720		    }
4721		}
4722	      else
4723		{
4724		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4725		  loop_top = gen_label_rtx ();
4726		  loop_end = gen_label_rtx ();
4727
4728		  unsignedp = TREE_UNSIGNED (domain);
4729
4730		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4731
4732		  index_r
4733		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4734						 &unsignedp, 0));
4735		  SET_DECL_RTL (index, index_r);
4736		  if (TREE_CODE (value) == SAVE_EXPR
4737		      && SAVE_EXPR_RTL (value) == 0)
4738		    {
4739		      /* Make sure value gets expanded once before the
4740                         loop.  */
4741		      expand_expr (value, const0_rtx, VOIDmode, 0);
4742		      emit_queue ();
4743		    }
4744		  store_expr (lo_index, index_r, 0);
4745		  loop = expand_start_loop (0);
4746
4747		  /* Assign value to element index.  */
4748		  position
4749		    = convert (ssizetype,
4750			       fold (build (MINUS_EXPR, TREE_TYPE (index),
4751					    index, TYPE_MIN_VALUE (domain))));
4752		  position = size_binop (MULT_EXPR, position,
4753					 convert (ssizetype,
4754						  TYPE_SIZE_UNIT (elttype)));
4755
4756		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4757		  xtarget = offset_address (target, pos_rtx,
4758					    highest_pow2_factor (position));
4759		  xtarget = adjust_address (xtarget, mode, 0);
4760		  if (TREE_CODE (value) == CONSTRUCTOR)
4761		    store_constructor (value, xtarget, cleared,
4762				       bitsize / BITS_PER_UNIT);
4763		  else
4764		    store_expr (value, xtarget, 0);
4765
4766		  expand_exit_loop_if_false (loop,
4767					     build (LT_EXPR, integer_type_node,
4768						    index, hi_index));
4769
4770		  expand_increment (build (PREINCREMENT_EXPR,
4771					   TREE_TYPE (index),
4772					   index, integer_one_node), 0, 0);
4773		  expand_end_loop ();
4774		  emit_label (loop_end);
4775		}
4776	    }
4777	  else if ((index != 0 && ! host_integerp (index, 0))
4778		   || ! host_integerp (TYPE_SIZE (elttype), 1))
4779	    {
4780	      tree position;
4781
4782	      if (index == 0)
4783		index = ssize_int (1);
4784
4785	      if (minelt)
4786		index = convert (ssizetype,
4787				 fold (build (MINUS_EXPR, TREE_TYPE (index),
4788					      index, TYPE_MIN_VALUE (domain))));
4789
4790	      position = size_binop (MULT_EXPR, index,
4791				     convert (ssizetype,
4792					      TYPE_SIZE_UNIT (elttype)));
4793	      xtarget = offset_address (target,
4794					expand_expr (position, 0, VOIDmode, 0),
4795					highest_pow2_factor (position));
4796	      xtarget = adjust_address (xtarget, mode, 0);
4797	      store_expr (value, xtarget, 0);
4798	    }
4799	  else
4800	    {
4801	      if (index != 0)
4802		bitpos = ((tree_low_cst (index, 0) - minelt)
4803			  * tree_low_cst (TYPE_SIZE (elttype), 1));
4804	      else
4805		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4806
4807	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4808		  && TREE_CODE (type) == ARRAY_TYPE
4809		  && TYPE_NONALIASED_COMPONENT (type))
4810		{
4811		  target = copy_rtx (target);
4812		  MEM_KEEP_ALIAS_SET_P (target) = 1;
4813		}
4814
4815	      store_constructor_field (target, bitsize, bitpos, mode, value,
4816				       type, cleared, get_alias_set (elttype));
4817
4818	    }
4819	}
4820    }
4821
4822  /* Set constructor assignments.  */
4823  else if (TREE_CODE (type) == SET_TYPE)
4824    {
4825      tree elt = CONSTRUCTOR_ELTS (exp);
4826      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4827      tree domain = TYPE_DOMAIN (type);
4828      tree domain_min, domain_max, bitlength;
4829
4830      /* The default implementation strategy is to extract the constant
4831	 parts of the constructor, use that to initialize the target,
4832	 and then "or" in whatever non-constant ranges we need in addition.
4833
4834	 If a large set is all zero or all ones, it is
4835	 probably better to set it using memset (if available) or bzero.
4836	 Also, if a large set has just a single range, it may be
4837	 better to first clear the whole set (using bzero/memset),
4838	 and then set the bits we want.  */
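      /* Rough sketch (illustrative): a set over a domain of 64 bits falls
	 into the "small" case and is assembled below word by word from the
	 constant bits; a much larger set would instead be cleared (if
	 needed) and then have each range filled in by the memset or
	 __setbits library calls further down.  */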
4839
4840      /* Check for all zeros.  */
4841      if (elt == NULL_TREE && size > 0)
4842	{
4843	  if (!cleared)
4844	    clear_storage (target, GEN_INT (size));
4845	  return;
4846	}
4847
4848      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4849      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4850      bitlength = size_binop (PLUS_EXPR,
4851			      size_diffop (domain_max, domain_min),
4852			      ssize_int (1));
4853
4854      nbits = tree_low_cst (bitlength, 1);
4855
4856      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4857	 are "complicated" (more than one range), initialize (the
4858	 constant parts) by copying from a constant.  */
4859      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4860	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4861	{
4862	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4863	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4864	  char *bit_buffer = (char *) alloca (nbits);
4865	  HOST_WIDE_INT word = 0;
4866	  unsigned int bit_pos = 0;
4867	  unsigned int ibit = 0;
4868	  unsigned int offset = 0;  /* In bytes from beginning of set.  */
4869
4870	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4871	  for (;;)
4872	    {
4873	      if (bit_buffer[ibit])
4874		{
4875		  if (BYTES_BIG_ENDIAN)
4876		    word |= (1 << (set_word_size - 1 - bit_pos));
4877		  else
4878		    word |= 1 << bit_pos;
4879		}
4880
4881	      bit_pos++;  ibit++;
4882	      if (bit_pos >= set_word_size || ibit == nbits)
4883		{
4884		  if (word != 0 || ! cleared)
4885		    {
4886		      rtx datum = GEN_INT (word);
4887		      rtx to_rtx;
4888
4889		      /* The assumption here is that it is safe to use
4890			 XEXP if the set is multi-word, but not if
4891			 it's single-word.  */
4892		      if (GET_CODE (target) == MEM)
4893			to_rtx = adjust_address (target, mode, offset);
4894		      else if (offset == 0)
4895			to_rtx = target;
4896		      else
4897			abort ();
4898		      emit_move_insn (to_rtx, datum);
4899		    }
4900
4901		  if (ibit == nbits)
4902		    break;
4903		  word = 0;
4904		  bit_pos = 0;
4905		  offset += set_word_size / BITS_PER_UNIT;
4906		}
4907	    }
4908	}
4909      else if (!cleared)
4910	/* Don't bother clearing storage if the set is all ones.  */
4911	if (TREE_CHAIN (elt) != NULL_TREE
4912	    || (TREE_PURPOSE (elt) == NULL_TREE
4913		? nbits != 1
4914		: ( ! host_integerp (TREE_VALUE (elt), 0)
4915		   || ! host_integerp (TREE_PURPOSE (elt), 0)
4916		   || (tree_low_cst (TREE_VALUE (elt), 0)
4917		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4918		       != (HOST_WIDE_INT) nbits))))
4919	  clear_storage (target, expr_size (exp));
4920
4921      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4922	{
4923	  /* Start of range of element or NULL.  */
4924	  tree startbit = TREE_PURPOSE (elt);
4925	  /* End of range of element, or element value.  */
4926	  tree endbit   = TREE_VALUE (elt);
4927#ifdef TARGET_MEM_FUNCTIONS
4928	  HOST_WIDE_INT startb, endb;
4929#endif
4930	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4931
4932	  bitlength_rtx = expand_expr (bitlength,
4933				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4934
4935	  /* Handle non-range tuple element like [ expr ].  */
4936	  if (startbit == NULL_TREE)
4937	    {
4938	      startbit = save_expr (endbit);
4939	      endbit = startbit;
4940	    }
4941
4942	  startbit = convert (sizetype, startbit);
4943	  endbit = convert (sizetype, endbit);
4944	  if (! integer_zerop (domain_min))
4945	    {
4946	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4947	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4948	    }
4949	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4950				      EXPAND_CONST_ADDRESS);
4951	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4952				    EXPAND_CONST_ADDRESS);
4953
4954	  if (REG_P (target))
4955	    {
4956	      targetx
4957		= assign_temp
4958		  ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4959					  TYPE_QUAL_CONST)),
4960		   0, 1, 1);
4961	      emit_move_insn (targetx, target);
4962	    }
4963
4964	  else if (GET_CODE (target) == MEM)
4965	    targetx = target;
4966	  else
4967	    abort ();
4968
4969#ifdef TARGET_MEM_FUNCTIONS
4970	  /* Optimization:  If startbit and endbit are
4971	     constants divisible by BITS_PER_UNIT,
4972	     call memset instead.  */
4973	  if (TREE_CODE (startbit) == INTEGER_CST
4974	      && TREE_CODE (endbit) == INTEGER_CST
4975	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4976	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4977	    {
4978	      emit_library_call (memset_libfunc, LCT_NORMAL,
4979				 VOIDmode, 3,
4980				 plus_constant (XEXP (targetx, 0),
4981						startb / BITS_PER_UNIT),
4982				 Pmode,
4983				 constm1_rtx, TYPE_MODE (integer_type_node),
4984				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4985				 TYPE_MODE (sizetype));
4986	    }
4987	  else
4988#endif
4989	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4990			       LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4991			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4992			       startbit_rtx, TYPE_MODE (sizetype),
4993			       endbit_rtx, TYPE_MODE (sizetype));
4994
4995	  if (REG_P (target))
4996	    emit_move_insn (target, targetx);
4997	}
4998    }
4999
5000  else
5001    abort ();
5002}
5003
5004/* Store the value of EXP (an expression tree)
5005   into a subfield of TARGET which has mode MODE and occupies
5006   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5007   If MODE is VOIDmode, it means that we are storing into a bit-field.
5008
5009   If VALUE_MODE is VOIDmode, return nothing in particular.
5010   UNSIGNEDP is not used in this case.
5011
5012   Otherwise, return an rtx for the value stored.  This rtx
5013   has mode VALUE_MODE if that is convenient to do.
5014   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5015
5016   TYPE is the type of the underlying object.
5017
5018   ALIAS_SET is the alias set for the destination.  This value will
5019   (in general) be different from that for TARGET, since TARGET is a
5020   reference to the containing structure.  */
5021
5022static rtx
5023store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5024	     alias_set)
5025     rtx target;
5026     HOST_WIDE_INT bitsize;
5027     HOST_WIDE_INT bitpos;
5028     enum machine_mode mode;
5029     tree exp;
5030     enum machine_mode value_mode;
5031     int unsignedp;
5032     tree type;
5033     int alias_set;
5034{
5035  HOST_WIDE_INT width_mask = 0;
5036
5037  if (TREE_CODE (exp) == ERROR_MARK)
5038    return const0_rtx;
5039
5040  /* If we have nothing to store, do nothing unless the expression has
5041     side-effects.  */
5042  if (bitsize == 0)
5043    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5044  else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5045    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
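  /* Editorial example (not in the original source): for bitsize == 5,
     width_mask is ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, i.e. a mask
     selecting the low five bits of the value being stored.  */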
5046
5047  /* If we are storing into an unaligned field of an aligned union that is
5048     in a register, we may have the mode of TARGET being an integer mode but
5049     MODE == BLKmode.  In that case, get an aligned object whose size and
5050     alignment are the same as TARGET and store TARGET into it (we can avoid
5051     the store if the field being stored is the entire width of TARGET).  Then
5052     call ourselves recursively to store the field into a BLKmode version of
5053     that object.  Finally, load from the object into TARGET.  This is not
5054     very efficient in general, but should only be slightly more expensive
5055     than the otherwise-required unaligned accesses.  Perhaps this can be
5056     cleaned up later.  */
5057
5058  if (mode == BLKmode
5059      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5060    {
5061      rtx object
5062	= assign_temp
5063	  (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5064	   0, 1, 1);
5065      rtx blk_object = adjust_address (object, BLKmode, 0);
5066
5067      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5068	emit_move_insn (object, target);
5069
5070      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5071		   alias_set);
5072
5073      emit_move_insn (target, object);
5074
5075      /* We want to return the BLKmode version of the data.  */
5076      return blk_object;
5077    }
5078
5079  if (GET_CODE (target) == CONCAT)
5080    {
5081      /* We're storing into a struct containing a single __complex.  */
5082
5083      if (bitpos != 0)
5084	abort ();
5085      return store_expr (exp, target, 0);
5086    }
5087
5088  /* If the structure is in a register or if the component
5089     is a bit field, we cannot use addressing to access it.
5090     Use bit-field techniques or SUBREG to store in it.  */
5091
5092  if (mode == VOIDmode
5093      || (mode != BLKmode && ! direct_store[(int) mode]
5094	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5095	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5096      || GET_CODE (target) == REG
5097      || GET_CODE (target) == SUBREG
5098      /* If the field isn't aligned enough to store as an ordinary memref,
5099	 store it as a bit field.  */
5100      || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5101	  && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5102	      || bitpos % GET_MODE_ALIGNMENT (mode)))
5103      /* If the RHS and field are a constant size and the size of the
5104	 RHS isn't the same size as the bitfield, we must use bitfield
5105	 operations.  */
5106      || (bitsize >= 0
5107	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5108	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5109    {
5110      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5111
5112      /* If BITSIZE is narrower than the size of the type of EXP
5113	 we will be narrowing TEMP.  Normally, what's wanted are the
5114	 low-order bits.  However, if EXP's type is a record and this is
5115	 a big-endian machine, we want the upper BITSIZE bits.  */
5116      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5117	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5118	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5119	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5120			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5121				       - bitsize),
5122			     temp, 1);
5123
5124      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5125	 MODE.  */
5126      if (mode != VOIDmode && mode != BLKmode
5127	  && mode != TYPE_MODE (TREE_TYPE (exp)))
5128	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5129
5130      /* If the modes of TARGET and TEMP are both BLKmode, both
5131	 must be in memory and BITPOS must be aligned on a byte
5132	 boundary.  If so, we simply do a block copy.  */
5133      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5134	{
5135	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5136	      || bitpos % BITS_PER_UNIT != 0)
5137	    abort ();
5138
5139	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5140	  emit_block_move (target, temp,
5141			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5142				    / BITS_PER_UNIT));
5143
5144	  return value_mode == VOIDmode ? const0_rtx : target;
5145	}
5146
5147      /* Store the value in the bitfield.  */
5148      store_bit_field (target, bitsize, bitpos, mode, temp,
5149		       int_size_in_bytes (type));
5150
5151      if (value_mode != VOIDmode)
5152	{
5153	  /* The caller wants an rtx for the value.
5154	     If possible, avoid refetching from the bitfield itself.  */
5155	  if (width_mask != 0
5156	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5157	    {
5158	      tree count;
5159	      enum machine_mode tmode;
5160
5161	      tmode = GET_MODE (temp);
5162	      if (tmode == VOIDmode)
5163		tmode = value_mode;
5164
5165	      if (unsignedp)
5166		return expand_and (tmode, temp,
5167				   GEN_INT (trunc_int_for_mode (width_mask,
5168								tmode)),
5169				   NULL_RTX);
5170
5171	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5172	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5173	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5174	    }
5175
5176	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
5177				    NULL_RTX, value_mode, VOIDmode,
5178				    int_size_in_bytes (type));
5179	}
5180      return const0_rtx;
5181    }
5182  else
5183    {
5184      rtx addr = XEXP (target, 0);
5185      rtx to_rtx = target;
5186
5187      /* If a value is wanted, it must be the lhs;
5188	 so make the address stable for multiple use.  */
5189
5190      if (value_mode != VOIDmode && GET_CODE (addr) != REG
5191	  && ! CONSTANT_ADDRESS_P (addr)
5192	  /* A frame-pointer reference is already stable.  */
5193	  && ! (GET_CODE (addr) == PLUS
5194		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
5195		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
5196		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5197	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5198
5199      /* Now build a reference to just the desired component.  */
5200
5201      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5202
5203      if (to_rtx == target)
5204	to_rtx = copy_rtx (to_rtx);
5205
5206      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5207      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5208	set_mem_alias_set (to_rtx, alias_set);
5209
5210      return store_expr (exp, to_rtx, value_mode != VOIDmode);
5211    }
5212}
5213
5214/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5215   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5216   codes and find the ultimate containing object, which we return.
5217
5218   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5219   bit position, and *PUNSIGNEDP to the signedness of the field.
5220   If the position of the field is variable, we store a tree
5221   giving the variable offset (in units) in *POFFSET.
5222   This offset is in addition to the bit position.
5223   If the position is not variable, we store 0 in *POFFSET.
5224
5225   If any of the extraction expressions is volatile,
5226   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5227
5228   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5229   is a mode that can be used to access the field.  In that case, *PBITSIZE
5230   is redundant.
5231
5232   If the field describes a variable-sized object, *PMODE is set to
5233   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5234   this case, but the address of the object can be found.  */
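
/* Editorial sketch (not part of the original file): a typical caller, in
   the style of expand_assignment elsewhere in this file, looks like

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode1;
	int unsignedp, volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);

   where EXP is a COMPONENT_REF, ARRAY_REF or similar reference and TEM
   ends up being the ultimate containing object.  */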
5235
5236tree
5237get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5238		     punsignedp, pvolatilep)
5239     tree exp;
5240     HOST_WIDE_INT *pbitsize;
5241     HOST_WIDE_INT *pbitpos;
5242     tree *poffset;
5243     enum machine_mode *pmode;
5244     int *punsignedp;
5245     int *pvolatilep;
5246{
5247  tree size_tree = 0;
5248  enum machine_mode mode = VOIDmode;
5249  tree offset = size_zero_node;
5250  tree bit_offset = bitsize_zero_node;
5251  tree placeholder_ptr = 0;
5252  tree tem;
5253
5254  /* First get the mode, signedness, and size.  We do this from just the
5255     outermost expression.  */
5256  if (TREE_CODE (exp) == COMPONENT_REF)
5257    {
5258      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5259      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5260	mode = DECL_MODE (TREE_OPERAND (exp, 1));
5261
5262      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5263    }
5264  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5265    {
5266      size_tree = TREE_OPERAND (exp, 1);
5267      *punsignedp = TREE_UNSIGNED (exp);
5268    }
5269  else
5270    {
5271      mode = TYPE_MODE (TREE_TYPE (exp));
5272      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5273
5274      if (mode == BLKmode)
5275	size_tree = TYPE_SIZE (TREE_TYPE (exp));
5276      else
5277	*pbitsize = GET_MODE_BITSIZE (mode);
5278    }
5279
5280  if (size_tree != 0)
5281    {
5282      if (! host_integerp (size_tree, 1))
5283	mode = BLKmode, *pbitsize = -1;
5284      else
5285	*pbitsize = tree_low_cst (size_tree, 1);
5286    }
5287
5288  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5289     and find the ultimate containing object.  */
5290  while (1)
5291    {
5292      if (TREE_CODE (exp) == BIT_FIELD_REF)
5293	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5294      else if (TREE_CODE (exp) == COMPONENT_REF)
5295	{
5296	  tree field = TREE_OPERAND (exp, 1);
5297	  tree this_offset = DECL_FIELD_OFFSET (field);
5298
5299	  /* If this field hasn't been filled in yet, don't go
5300	     past it.  This should only happen when folding expressions
5301	     made during type construction.  */
5302	  if (this_offset == 0)
5303	    break;
5304	  else if (! TREE_CONSTANT (this_offset)
5305		   && contains_placeholder_p (this_offset))
5306	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5307
5308	  offset = size_binop (PLUS_EXPR, offset, this_offset);
5309	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5310				   DECL_FIELD_BIT_OFFSET (field));
5311
5312	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5313	}
5314
5315      else if (TREE_CODE (exp) == ARRAY_REF
5316	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
5317	{
5318	  tree index = TREE_OPERAND (exp, 1);
5319	  tree array = TREE_OPERAND (exp, 0);
5320	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5321	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5322	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5323
5324	  /* We assume all arrays have sizes that are a multiple of a byte.
5325	     First subtract the lower bound, if any, in the type of the
5326	     index, then convert to sizetype and multiply by the size of the
5327	     array element.  */
5328	  if (low_bound != 0 && ! integer_zerop (low_bound))
5329	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5330				 index, low_bound));
5331
5332	  /* If the index has a self-referential type, pass it to a
5333	     WITH_RECORD_EXPR; if the component size is self-referential,
5334	     pass our component to one.  */
5335	  if (! TREE_CONSTANT (index)
5336	      && contains_placeholder_p (index))
5337	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5338	  if (! TREE_CONSTANT (unit_size)
5339	      && contains_placeholder_p (unit_size))
5340	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5341
5342	  offset = size_binop (PLUS_EXPR, offset,
5343			       size_binop (MULT_EXPR,
5344					   convert (sizetype, index),
5345					   unit_size));
5346	}
5347
5348      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5349	{
5350	  tree new = find_placeholder (exp, &placeholder_ptr);
5351
5352	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5353	     We might have been called from tree optimization where we
5354	     haven't set up an object yet.  */
5355	  if (new == 0)
5356	    break;
5357	  else
5358	    exp = new;
5359
5360	  continue;
5361	}
5362      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5363	       && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5364	       && ! ((TREE_CODE (exp) == NOP_EXPR
5365		      || TREE_CODE (exp) == CONVERT_EXPR)
5366		     && (TYPE_MODE (TREE_TYPE (exp))
5367			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5368	break;
5369
5370      /* If any reference in the chain is volatile, the effect is volatile.  */
5371      if (TREE_THIS_VOLATILE (exp))
5372	*pvolatilep = 1;
5373
5374      exp = TREE_OPERAND (exp, 0);
5375    }
5376
5377  /* If OFFSET is constant, see if we can return the whole thing as a
5378     constant bit position.  Otherwise, split it up.  */
5379  if (host_integerp (offset, 0)
5380      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5381				 bitsize_unit_node))
5382      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5383      && host_integerp (tem, 0))
5384    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5385  else
5386    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5387
5388  *pmode = mode;
5389  return exp;
5390}
5391
5392/* Return 1 if T is an expression that get_inner_reference handles.  */
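
/* Editorial note (not part of the original file): one way a caller can use
   this predicate is to strip a reference down to its base object, e.g.

	while (handled_component_p (t))
	  t = TREE_OPERAND (t, 0);

   which walks through the codes accepted below.  */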
5393
5394int
5395handled_component_p (t)
5396     tree t;
5397{
5398  switch (TREE_CODE (t))
5399    {
5400    case BIT_FIELD_REF:
5401    case COMPONENT_REF:
5402    case ARRAY_REF:
5403    case ARRAY_RANGE_REF:
5404    case NON_LVALUE_EXPR:
5405    case VIEW_CONVERT_EXPR:
5406      return 1;
5407
5408    case NOP_EXPR:
5409    case CONVERT_EXPR:
5410      return (TYPE_MODE (TREE_TYPE (t))
5411	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5412
5413    default:
5414      return 0;
5415    }
5416}
5417
5418/* Given an rtx VALUE that may contain additions and multiplications, return
5419   an equivalent value that just refers to a register, memory, or constant.
5420   This is done by generating instructions to perform the arithmetic and
5421   returning a pseudo-register containing the value.
5422
5423   The returned value may be a REG, SUBREG, MEM or constant.  */
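
/* Editorial sketch (not part of the original file): forcing a computed
   address into something an insn can use, with BASE_REG standing for some
   pseudo register:

	rtx addr = force_operand (plus_constant (base_reg, 16), NULL_RTX);

   Passing NULL_RTX as TARGET lets force_operand choose where to put the
   result.  */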
5424
5425rtx
5426force_operand (value, target)
5427     rtx value, target;
5428{
5429  rtx op1, op2;
5430  /* Use subtarget as the target for operand 0 of a binary operation.  */
5431  rtx subtarget = get_subtarget (target);
5432  enum rtx_code code = GET_CODE (value);
5433
5434  /* Check for a PIC address load.  */
5435  if ((code == PLUS || code == MINUS)
5436      && XEXP (value, 0) == pic_offset_table_rtx
5437      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5438	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5439	  || GET_CODE (XEXP (value, 1)) == CONST))
5440    {
5441      if (!subtarget)
5442	subtarget = gen_reg_rtx (GET_MODE (value));
5443      emit_move_insn (subtarget, value);
5444      return subtarget;
5445    }
5446
5447  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5448    {
5449      if (!target)
5450	target = gen_reg_rtx (GET_MODE (value));
5451      convert_move (target, force_operand (XEXP (value, 0), NULL),
5452		    code == ZERO_EXTEND);
5453      return target;
5454    }
5455
5456  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5457    {
5458      op2 = XEXP (value, 1);
5459      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5460	subtarget = 0;
5461      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5462	{
5463	  code = PLUS;
5464	  op2 = negate_rtx (GET_MODE (value), op2);
5465	}
5466
5467      /* Check for an addition with OP2 a constant integer and our first
5468         operand a PLUS of a virtual register and something else.  In that
5469         case, we want to emit the sum of the virtual register and the
5470         constant first and then add the other value.  This allows virtual
5471         register instantiation to simply modify the constant rather than
5472         creating another one around this addition.  */
5473      if (code == PLUS && GET_CODE (op2) == CONST_INT
5474	  && GET_CODE (XEXP (value, 0)) == PLUS
5475	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5476	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5477	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5478	{
5479	  rtx temp = expand_simple_binop (GET_MODE (value), code,
5480					  XEXP (XEXP (value, 0), 0), op2,
5481					  subtarget, 0, OPTAB_LIB_WIDEN);
5482	  return expand_simple_binop (GET_MODE (value), code, temp,
5483				      force_operand (XEXP (XEXP (value,
5484								 0), 1), 0),
5485				      target, 0, OPTAB_LIB_WIDEN);
5486	}
5487
5488      op1 = force_operand (XEXP (value, 0), subtarget);
5489      op2 = force_operand (op2, NULL_RTX);
5490      switch (code)
5491	{
5492	case MULT:
5493	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
5494	case DIV:
5495	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
5496	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
5497					target, 1, OPTAB_LIB_WIDEN);
5498	  else
5499	    return expand_divmod (0,
5500				  FLOAT_MODE_P (GET_MODE (value))
5501				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5502				  GET_MODE (value), op1, op2, target, 0);
5503	  break;
5504	case MOD:
5505	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5506				target, 0);
5507	  break;
5508	case UDIV:
5509	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5510				target, 1);
5511	  break;
5512	case UMOD:
5513	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5514				target, 1);
5515	  break;
5516	case ASHIFTRT:
5517	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5518				      target, 0, OPTAB_LIB_WIDEN);
5519	  break;
5520	default:
5521	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
5522				      target, 1, OPTAB_LIB_WIDEN);
5523	}
5524    }
5525  if (GET_RTX_CLASS (code) == '1')
5526    {
5527      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5528      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5529    }
5530
5531#ifdef INSN_SCHEDULING
5532  /* On machines that have insn scheduling, we want all memory references to be
5533     explicit, so we need to deal with such paradoxical SUBREGs.  */
5534  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5535      && (GET_MODE_SIZE (GET_MODE (value))
5536	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5537    value
5538      = simplify_gen_subreg (GET_MODE (value),
5539			     force_reg (GET_MODE (SUBREG_REG (value)),
5540					force_operand (SUBREG_REG (value),
5541						       NULL_RTX)),
5542			     GET_MODE (SUBREG_REG (value)),
5543			     SUBREG_BYTE (value));
5544#endif
5545
5546  return value;
5547}
5548
5549/* Subroutine of expand_expr: return nonzero iff there is no way that
5550   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5551   call is going to be used to determine whether we need a temporary
5552   for EXP, as opposed to a recursive call to this function.
5553
5554   It is always safe for this routine to return zero since it merely
5555   searches for optimization opportunities.  */
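
/* Editorial sketch (not part of the original file): the usual guard, as in
   the CONSTRUCTOR case of expand_expr below, abandons a proposed target
   that the expression being expanded might reference:

	if (target == 0 || ! safe_from_p (target, exp, 1))
	  target = gen_reg_rtx (mode);

   (That case actually falls back to assign_temp; gen_reg_rtx is shown here
   only as a simpler illustration.)  */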
5556
5557int
5558safe_from_p (x, exp, top_p)
5559     rtx x;
5560     tree exp;
5561     int top_p;
5562{
5563  rtx exp_rtl = 0;
5564  int i, nops;
5565  static tree save_expr_list;
5566
5567  if (x == 0
5568      /* If EXP has varying size, we MUST use a target since we currently
5569	 have no way of allocating temporaries of variable size
5570	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5571	 So we assume here that something at a higher level has prevented a
5572	 clash.  This is somewhat bogus, but the best we can do.  Only
5573	 do this when X is BLKmode and when we are at the top level.  */
5574      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5575	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5576	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5577	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5578	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5579	      != INTEGER_CST)
5580	  && GET_MODE (x) == BLKmode)
5581      /* If X is in the outgoing argument area, it is always safe.  */
5582      || (GET_CODE (x) == MEM
5583	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5584	      || (GET_CODE (XEXP (x, 0)) == PLUS
5585		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5586    return 1;
5587
5588  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5589     find the underlying pseudo.  */
5590  if (GET_CODE (x) == SUBREG)
5591    {
5592      x = SUBREG_REG (x);
5593      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5594	return 0;
5595    }
5596
5597  /* A SAVE_EXPR might appear many times in the expression passed to the
5598     top-level safe_from_p call, and if it has a complex subexpression,
5599     examining it multiple times could result in a combinatorial explosion.
5600     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5601     with optimization took about 28 minutes to compile -- even though it was
5602     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
5603     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
5604     we have processed.  Note that the only test of top_p was above.  */
5605
5606  if (top_p)
5607    {
5608      int rtn;
5609      tree t;
5610
5611      save_expr_list = 0;
5612
5613      rtn = safe_from_p (x, exp, 0);
5614
5615      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5616	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5617
5618      return rtn;
5619    }
5620
5621  /* Now look at our tree code and possibly recurse.  */
5622  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5623    {
5624    case 'd':
5625      exp_rtl = DECL_RTL_IF_SET (exp);
5626      break;
5627
5628    case 'c':
5629      return 1;
5630
5631    case 'x':
5632      if (TREE_CODE (exp) == TREE_LIST)
5633	return ((TREE_VALUE (exp) == 0
5634		 || safe_from_p (x, TREE_VALUE (exp), 0))
5635		&& (TREE_CHAIN (exp) == 0
5636		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5637      else if (TREE_CODE (exp) == ERROR_MARK)
5638	return 1;	/* An already-visited SAVE_EXPR? */
5639      else
5640	return 0;
5641
5642    case '1':
5643      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5644
5645    case '2':
5646    case '<':
5647      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5648	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5649
5650    case 'e':
5651    case 'r':
5652      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5653	 the expression.  If it is set, we conflict iff we are that rtx or
5654	 both are in memory.  Otherwise, we check all operands of the
5655	 expression recursively.  */
5656
5657      switch (TREE_CODE (exp))
5658	{
5659	case ADDR_EXPR:
5660	  /* If the operand is static or we are static, we can't conflict.
5661	     Likewise if we don't conflict with the operand at all.  */
5662	  if (staticp (TREE_OPERAND (exp, 0))
5663	      || TREE_STATIC (exp)
5664	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5665	    return 1;
5666
5667	  /* Otherwise, the only way this can conflict is if we are taking
5668	     the address of a DECL whose address is part of X, which is
5669	     very rare.  */
5670	  exp = TREE_OPERAND (exp, 0);
5671	  if (DECL_P (exp))
5672	    {
5673	      if (!DECL_RTL_SET_P (exp)
5674		  || GET_CODE (DECL_RTL (exp)) != MEM)
5675		return 0;
5676	      else
5677		exp_rtl = XEXP (DECL_RTL (exp), 0);
5678	    }
5679	  break;
5680
5681	case INDIRECT_REF:
5682	  if (GET_CODE (x) == MEM
5683	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5684					get_alias_set (exp)))
5685	    return 0;
5686	  break;
5687
5688	case CALL_EXPR:
5689	  /* Assume that the call will clobber all hard registers and
5690	     all of memory.  */
5691	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5692	      || GET_CODE (x) == MEM)
5693	    return 0;
5694	  break;
5695
5696	case RTL_EXPR:
5697	  /* If a sequence exists, we would have to scan every instruction
5698	     in the sequence to see if it was safe.  This is probably not
5699	     worthwhile.  */
5700	  if (RTL_EXPR_SEQUENCE (exp))
5701	    return 0;
5702
5703	  exp_rtl = RTL_EXPR_RTL (exp);
5704	  break;
5705
5706	case WITH_CLEANUP_EXPR:
5707	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5708	  break;
5709
5710	case CLEANUP_POINT_EXPR:
5711	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5712
5713	case SAVE_EXPR:
5714	  exp_rtl = SAVE_EXPR_RTL (exp);
5715	  if (exp_rtl)
5716	    break;
5717
5718	  /* If we've already scanned this, don't do it again.  Otherwise,
5719	     show we've scanned it and record for clearing the flag if we're
5720	     going on.  */
5721	  if (TREE_PRIVATE (exp))
5722	    return 1;
5723
5724	  TREE_PRIVATE (exp) = 1;
5725	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5726	    {
5727	      TREE_PRIVATE (exp) = 0;
5728	      return 0;
5729	    }
5730
5731	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5732	  return 1;
5733
5734	case BIND_EXPR:
5735	  /* The only operand we look at is operand 1.  The rest aren't
5736	     part of the expression.  */
5737	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5738
5739	case METHOD_CALL_EXPR:
5740	  /* This takes an rtx argument, but shouldn't appear here.  */
5741	  abort ();
5742
5743	default:
5744	  break;
5745	}
5746
5747      /* If we have an rtx, we do not need to scan our operands.  */
5748      if (exp_rtl)
5749	break;
5750
5751      nops = first_rtl_op (TREE_CODE (exp));
5752      for (i = 0; i < nops; i++)
5753	if (TREE_OPERAND (exp, i) != 0
5754	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5755	  return 0;
5756
5757      /* If this is a language-specific tree code, it may require
5758	 special handling.  */
5759      if ((unsigned int) TREE_CODE (exp)
5760	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5761	  && !(*lang_hooks.safe_from_p) (x, exp))
5762	return 0;
5763    }
5764
5765  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5766     with it.  */
5767  if (exp_rtl)
5768    {
5769      if (GET_CODE (exp_rtl) == SUBREG)
5770	{
5771	  exp_rtl = SUBREG_REG (exp_rtl);
5772	  if (GET_CODE (exp_rtl) == REG
5773	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5774	    return 0;
5775	}
5776
5777      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5778	 are memory and they conflict.  */
5779      return ! (rtx_equal_p (x, exp_rtl)
5780		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5781		    && true_dependence (exp_rtl, VOIDmode, x,
5782					rtx_addr_varies_p)));
5783    }
5784
5785  /* If we reach here, it is safe.  */
5786  return 1;
5787}
5788
5789/* Subroutine of expand_expr: return rtx if EXP is a
5790   variable or parameter; else return 0.  */
5791
5792static rtx
5793var_rtx (exp)
5794     tree exp;
5795{
5796  STRIP_NOPS (exp);
5797  switch (TREE_CODE (exp))
5798    {
5799    case PARM_DECL:
5800    case VAR_DECL:
5801      return DECL_RTL (exp);
5802    default:
5803      return 0;
5804    }
5805}
5806
5807#ifdef MAX_INTEGER_COMPUTATION_MODE
5808
5809void
5810check_max_integer_computation_mode (exp)
5811     tree exp;
5812{
5813  enum tree_code code;
5814  enum machine_mode mode;
5815
5816  /* Strip any NOPs that don't change the mode.  */
5817  STRIP_NOPS (exp);
5818  code = TREE_CODE (exp);
5819
5820  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5821  if (code == NOP_EXPR
5822      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5823    return;
5824
5825  /* First check the type of the overall operation.   We need only look at
5826     unary, binary and relational operations.  */
5827  if (TREE_CODE_CLASS (code) == '1'
5828      || TREE_CODE_CLASS (code) == '2'
5829      || TREE_CODE_CLASS (code) == '<')
5830    {
5831      mode = TYPE_MODE (TREE_TYPE (exp));
5832      if (GET_MODE_CLASS (mode) == MODE_INT
5833	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5834	internal_error ("unsupported wide integer operation");
5835    }
5836
5837  /* Check operand of a unary op.  */
5838  if (TREE_CODE_CLASS (code) == '1')
5839    {
5840      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5841      if (GET_MODE_CLASS (mode) == MODE_INT
5842	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5843	internal_error ("unsupported wide integer operation");
5844    }
5845
5846  /* Check operands of a binary/comparison op.  */
5847  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5848    {
5849      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5850      if (GET_MODE_CLASS (mode) == MODE_INT
5851	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5852	internal_error ("unsupported wide integer operation");
5853
5854      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5855      if (GET_MODE_CLASS (mode) == MODE_INT
5856	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5857	internal_error ("unsupported wide integer operation");
5858    }
5859}
5860#endif
5861
5862/* Return the highest power of two that EXP is known to be a multiple of.
5863   This is used in updating alignment of MEMs in array references.  */
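
/* Editorial example (not part of the original file): for an expression such
   as i * 12 + 4, the variable factor i contributes 1, the INTEGER_CST case
   below isolates the lowest set bit of 12 via c0 &= -c0 (12 & -12 == 4), the
   MULT_EXPR multiplies these to 4, and the PLUS_EXPR takes the minimum with
   the constant 4, so the whole expression is known to be a multiple of 4.  */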
5864
5865static HOST_WIDE_INT
5866highest_pow2_factor (exp)
5867     tree exp;
5868{
5869  HOST_WIDE_INT c0, c1;
5870
5871  switch (TREE_CODE (exp))
5872    {
5873    case INTEGER_CST:
5874      /* We can find the lowest bit that's a one.  If the low
5875	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5876	 We need to handle this case since we can find it in a COND_EXPR,
5877	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
5878	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5879	 later ICE.  */
5880      if (TREE_CONSTANT_OVERFLOW (exp))
5881	return BIGGEST_ALIGNMENT;
5882      else
5883	{
5884	  /* Note: tree_low_cst is intentionally not used here,
5885	     we don't care about the upper bits.  */
5886	  c0 = TREE_INT_CST_LOW (exp);
5887	  c0 &= -c0;
5888	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5889	}
5890      break;
5891
5892    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
5893      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5894      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5895      return MIN (c0, c1);
5896
5897    case MULT_EXPR:
5898      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5899      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5900      return c0 * c1;
5901
5902    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
5903    case CEIL_DIV_EXPR:
5904      if (integer_pow2p (TREE_OPERAND (exp, 1))
5905	  && host_integerp (TREE_OPERAND (exp, 1), 1))
5906	{
5907	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5908	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5909	  return MAX (1, c0 / c1);
5910	}
5911      break;
5912
5913    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
5914    case SAVE_EXPR: case WITH_RECORD_EXPR:
5915      return highest_pow2_factor (TREE_OPERAND (exp, 0));
5916
5917    case COMPOUND_EXPR:
5918      return highest_pow2_factor (TREE_OPERAND (exp, 1));
5919
5920    case COND_EXPR:
5921      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5922      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5923      return MIN (c0, c1);
5924
5925    default:
5926      break;
5927    }
5928
5929  return 1;
5930}
5931
5932/* Similar, except that it is known that the expression must be a multiple
5933   of the alignment of TYPE.  */
5934
5935static HOST_WIDE_INT
5936highest_pow2_factor_for_type (type, exp)
5937     tree type;
5938     tree exp;
5939{
5940  HOST_WIDE_INT type_align, factor;
5941
5942  factor = highest_pow2_factor (exp);
5943  type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
5944  return MAX (factor, type_align);
5945}
5946
5947/* Return an object on the placeholder list that matches EXP, a
5948   PLACEHOLDER_EXPR.  An object "matches" if it is of the type of the
5949   PLACEHOLDER_EXPR or a pointer type to it.  For further information, see
5950   tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
5951   is a location which initially points to a starting location in the
5952   placeholder list (zero means start of the list) and where a pointer into
5953   the placeholder list at which the object is found is placed.  */
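
/* Editorial note (not part of the original file): the PLACEHOLDER_EXPR case
   of expand_expr below calls this as

	tree placeholder_expr = 0;
	exp = find_placeholder (exp, &placeholder_expr);

   and, once the object is found, temporarily sets placeholder_list to
   TREE_CHAIN (placeholder_expr) while expanding the replacement.  */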
5954
5955tree
5956find_placeholder (exp, plist)
5957     tree exp;
5958     tree *plist;
5959{
5960  tree type = TREE_TYPE (exp);
5961  tree placeholder_expr;
5962
5963  for (placeholder_expr
5964       = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5965       placeholder_expr != 0;
5966       placeholder_expr = TREE_CHAIN (placeholder_expr))
5967    {
5968      tree need_type = TYPE_MAIN_VARIANT (type);
5969      tree elt;
5970
5971      /* Find the outermost reference that is of the type we want.  If none,
5972	 see if any object has a type that is a pointer to the type we
5973	 want.  */
5974      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5975	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5976		   || TREE_CODE (elt) == COND_EXPR)
5977		  ? TREE_OPERAND (elt, 1)
5978		  : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5979		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5980		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5981		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5982		  ? TREE_OPERAND (elt, 0) : 0))
5983	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5984	  {
5985	    if (plist)
5986	      *plist = placeholder_expr;
5987	    return elt;
5988	  }
5989
5990      for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5991	   elt
5992	   = ((TREE_CODE (elt) == COMPOUND_EXPR
5993	       || TREE_CODE (elt) == COND_EXPR)
5994	      ? TREE_OPERAND (elt, 1)
5995	      : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5996		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5997		 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5998		 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5999	      ? TREE_OPERAND (elt, 0) : 0))
6000	if (POINTER_TYPE_P (TREE_TYPE (elt))
6001	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6002		== need_type))
6003	  {
6004	    if (plist)
6005	      *plist = placeholder_expr;
6006	    return build1 (INDIRECT_REF, need_type, elt);
6007	  }
6008    }
6009
6010  return 0;
6011}
6012
6013/* expand_expr: generate code for computing expression EXP.
6014   An rtx for the computed value is returned.  The value is never null.
6015   In the case of a void EXP, const0_rtx is returned.
6016
6017   The value may be stored in TARGET if TARGET is nonzero.
6018   TARGET is just a suggestion; callers must assume that
6019   the rtx returned may not be the same as TARGET.
6020
6021   If TARGET is CONST0_RTX, it means that the value will be ignored.
6022
6023   If TMODE is not VOIDmode, it suggests generating the
6024   result in mode TMODE.  But this is done only when convenient.
6025   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6026   TMODE is just a suggestion; callers must assume that
6027   the rtx returned may not have mode TMODE.
6028
6029   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6030   probably will not be used.
6031
6032   If MODIFIER is EXPAND_SUM then when EXP is an addition
6033   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6034   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6035   products as above, or REG or MEM, or constant.
6036   Ordinarily in such cases we would output mul or add instructions
6037   and then return a pseudo reg containing the sum.
6038
6039   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6040   it also marks a label as absolutely required (it can't be dead).
6041   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6042   This is used for outputting expressions used in initializers.
6043
6044   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6045   with a constant address even if that address is not normally legitimate.
6046   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
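
/* Editorial sketch (not part of the original file): the most common call,
   seen throughout this file, asks for the value with no target or mode
   preference and the default modifier:

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   Since TARGET and TMODE are only hints, callers must use the returned rtx
   rather than assuming the value ended up in TARGET.  */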
6047
6048rtx
6049expand_expr (exp, target, tmode, modifier)
6050     tree exp;
6051     rtx target;
6052     enum machine_mode tmode;
6053     enum expand_modifier modifier;
6054{
6055  rtx op0, op1, temp;
6056  tree type = TREE_TYPE (exp);
6057  int unsignedp = TREE_UNSIGNED (type);
6058  enum machine_mode mode;
6059  enum tree_code code = TREE_CODE (exp);
6060  optab this_optab;
6061  rtx subtarget, original_target;
6062  int ignore;
6063  tree context;
6064
6065  /* Handle ERROR_MARK before anybody tries to access its type.  */
6066  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6067    {
6068      op0 = CONST0_RTX (tmode);
6069      if (op0 != 0)
6070	return op0;
6071      return const0_rtx;
6072    }
6073
6074  mode = TYPE_MODE (type);
6075  /* Use subtarget as the target for operand 0 of a binary operation.  */
6076  subtarget = get_subtarget (target);
6077  original_target = target;
6078  ignore = (target == const0_rtx
6079	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6080		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6081		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6082		&& TREE_CODE (type) == VOID_TYPE));
6083
6084  /* If we are going to ignore this result, we need only do something
6085     if there is a side-effect somewhere in the expression.  If there
6086     is, short-circuit the most common cases here.  Note that we must
6087     not call expand_expr with anything but const0_rtx in case this
6088     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6089
6090  if (ignore)
6091    {
6092      if (! TREE_SIDE_EFFECTS (exp))
6093	return const0_rtx;
6094
6095      /* Ensure we reference a volatile object even if value is ignored, but
6096	 don't do this if all we are doing is taking its address.  */
6097      if (TREE_THIS_VOLATILE (exp)
6098	  && TREE_CODE (exp) != FUNCTION_DECL
6099	  && mode != VOIDmode && mode != BLKmode
6100	  && modifier != EXPAND_CONST_ADDRESS)
6101	{
6102	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6103	  if (GET_CODE (temp) == MEM)
6104	    temp = copy_to_reg (temp);
6105	  return const0_rtx;
6106	}
6107
6108      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6109	  || code == INDIRECT_REF || code == BUFFER_REF)
6110	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6111			    modifier);
6112
6113      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6114	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6115	{
6116	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6117	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6118	  return const0_rtx;
6119	}
6120      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6121	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6122	/* If the second operand has no side effects, just evaluate
6123	   the first.  */
6124	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6125			    modifier);
6126      else if (code == BIT_FIELD_REF)
6127	{
6128	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6129	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6130	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6131	  return const0_rtx;
6132	}
6133
6134      target = 0;
6135    }
6136
6137#ifdef MAX_INTEGER_COMPUTATION_MODE
6138  /* Only check stuff here if the mode we want is different from the mode
6139     of the expression; if it's the same, check_max_integer_computation_mode
6140     will handle it.  Do we really need to check this stuff at all?  */
6141
6142  if (target
6143      && GET_MODE (target) != mode
6144      && TREE_CODE (exp) != INTEGER_CST
6145      && TREE_CODE (exp) != PARM_DECL
6146      && TREE_CODE (exp) != ARRAY_REF
6147      && TREE_CODE (exp) != ARRAY_RANGE_REF
6148      && TREE_CODE (exp) != COMPONENT_REF
6149      && TREE_CODE (exp) != BIT_FIELD_REF
6150      && TREE_CODE (exp) != INDIRECT_REF
6151      && TREE_CODE (exp) != CALL_EXPR
6152      && TREE_CODE (exp) != VAR_DECL
6153      && TREE_CODE (exp) != RTL_EXPR)
6154    {
6155      enum machine_mode mode = GET_MODE (target);
6156
6157      if (GET_MODE_CLASS (mode) == MODE_INT
6158	  && mode > MAX_INTEGER_COMPUTATION_MODE)
6159	internal_error ("unsupported wide integer operation");
6160    }
6161
6162  if (tmode != mode
6163      && TREE_CODE (exp) != INTEGER_CST
6164      && TREE_CODE (exp) != PARM_DECL
6165      && TREE_CODE (exp) != ARRAY_REF
6166      && TREE_CODE (exp) != ARRAY_RANGE_REF
6167      && TREE_CODE (exp) != COMPONENT_REF
6168      && TREE_CODE (exp) != BIT_FIELD_REF
6169      && TREE_CODE (exp) != INDIRECT_REF
6170      && TREE_CODE (exp) != VAR_DECL
6171      && TREE_CODE (exp) != CALL_EXPR
6172      && TREE_CODE (exp) != RTL_EXPR
6173      && GET_MODE_CLASS (tmode) == MODE_INT
6174      && tmode > MAX_INTEGER_COMPUTATION_MODE)
6175    internal_error ("unsupported wide integer operation");
6176
6177  check_max_integer_computation_mode (exp);
6178#endif
6179
6180  /* If we will do cse, generate all results into pseudo registers
6181     since 1) that allows cse to find more things
6182     and 2) otherwise cse could produce an insn the machine
6183     cannot support.  An exception is a CONSTRUCTOR into a multi-word
6184     MEM: that's much more likely to be most efficient into the MEM.  */
6185
6186  if (! cse_not_expected && mode != BLKmode && target
6187      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6188      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6189    target = subtarget;
6190
6191  switch (code)
6192    {
6193    case LABEL_DECL:
6194      {
6195	tree function = decl_function_context (exp);
6196	/* Handle using a label in a containing function.  */
6197	if (function != current_function_decl
6198	    && function != inline_function_decl && function != 0)
6199	  {
6200	    struct function *p = find_function_data (function);
6201	    p->expr->x_forced_labels
6202	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6203				   p->expr->x_forced_labels);
6204	  }
6205	else
6206	  {
6207	    if (modifier == EXPAND_INITIALIZER)
6208	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6209						 label_rtx (exp),
6210						 forced_labels);
6211	  }
6212
6213	temp = gen_rtx_MEM (FUNCTION_MODE,
6214			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6215	if (function != current_function_decl
6216	    && function != inline_function_decl && function != 0)
6217	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6218	return temp;
6219      }
6220
6221    case PARM_DECL:
6222      if (! DECL_RTL_SET_P (exp))
6223	{
6224	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6225	  return CONST0_RTX (mode);
6226	}
6227
6228      /* ... fall through ...  */
6229
6230    case VAR_DECL:
6231      /* If a static var's type was incomplete when the decl was written,
6232	 but the type is complete now, lay out the decl now.  */
6233      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6234	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6235	{
6236	  rtx value = DECL_RTL_IF_SET (exp);
6237
6238	  layout_decl (exp, 0);
6239
6240	  /* If the RTL was already set, update its mode and memory
6241	     attributes.  */
6242	  if (value != 0)
6243	    {
6244	      PUT_MODE (value, DECL_MODE (exp));
6245	      SET_DECL_RTL (exp, 0);
6246	      set_mem_attributes (value, exp, 1);
6247	      SET_DECL_RTL (exp, value);
6248	    }
6249	}
6250
6251      /* ... fall through ...  */
6252
6253    case FUNCTION_DECL:
6254    case RESULT_DECL:
6255      if (DECL_RTL (exp) == 0)
6256	abort ();
6257
6258      /* Ensure the variable is marked as used even if it doesn't go through
6259	 a parser.  If it hasn't been used yet, write out an external
6260	 definition.  */
6261      if (! TREE_USED (exp))
6262	{
6263	  assemble_external (exp);
6264	  TREE_USED (exp) = 1;
6265	}
6266
6267      /* Show we haven't gotten RTL for this yet.  */
6268      temp = 0;
6269
6270      /* Handle variables inherited from containing functions.  */
6271      context = decl_function_context (exp);
6272
6273      /* We treat inline_function_decl as an alias for the current function
6274	 because that is the inline function whose vars, types, etc.
6275	 are being merged into the current function.
6276	 See expand_inline_function.  */
6277
6278      if (context != 0 && context != current_function_decl
6279	  && context != inline_function_decl
6280	  /* If var is static, we don't need a static chain to access it.  */
6281	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
6282		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6283	{
6284	  rtx addr;
6285
6286	  /* Mark as non-local and addressable.  */
6287	  DECL_NONLOCAL (exp) = 1;
6288	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
6289	    abort ();
6290	  mark_addressable (exp);
6291	  if (GET_CODE (DECL_RTL (exp)) != MEM)
6292	    abort ();
6293	  addr = XEXP (DECL_RTL (exp), 0);
6294	  if (GET_CODE (addr) == MEM)
6295	    addr
6296	      = replace_equiv_address (addr,
6297				       fix_lexical_addr (XEXP (addr, 0), exp));
6298	  else
6299	    addr = fix_lexical_addr (addr, exp);
6300
6301	  temp = replace_equiv_address (DECL_RTL (exp), addr);
6302	}
6303
6304      /* This is the case of an array whose size is to be determined
6305	 from its initializer, while the initializer is still being parsed.
6306	 See expand_decl.  */
6307
6308      else if (GET_CODE (DECL_RTL (exp)) == MEM
6309	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6310	temp = validize_mem (DECL_RTL (exp));
6311
6312      /* If DECL_RTL is memory, we are in the normal case and either
6313	 the address is not valid or it is not a register and -fforce-addr
6314	 is specified, get the address into a register.  */
6315
6316      else if (GET_CODE (DECL_RTL (exp)) == MEM
6317	       && modifier != EXPAND_CONST_ADDRESS
6318	       && modifier != EXPAND_SUM
6319	       && modifier != EXPAND_INITIALIZER
6320	       && (! memory_address_p (DECL_MODE (exp),
6321				       XEXP (DECL_RTL (exp), 0))
6322		   || (flag_force_addr
6323		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6324	temp = replace_equiv_address (DECL_RTL (exp),
6325				      copy_rtx (XEXP (DECL_RTL (exp), 0)));
6326
6327      /* If we got something, return it.  But first, set the alignment
6328	 if the address is a register.  */
6329      if (temp != 0)
6330	{
6331	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6332	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6333
6334	  return temp;
6335	}
6336
6337      /* If the mode of DECL_RTL does not match that of the decl, it
6338	 must be a promoted value.  We return a SUBREG of the wanted mode,
6339	 but mark it so that we know that it was already extended.  */
6340
6341      if (GET_CODE (DECL_RTL (exp)) == REG
6342	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6343	{
6344	  /* Get the signedness used for this variable.  Ensure we get the
6345	     same mode we got when the variable was declared.  */
6346	  if (GET_MODE (DECL_RTL (exp))
6347	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
6348			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6349	    abort ();
6350
6351	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6352	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6353	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6354	  return temp;
6355	}
6356
6357      return DECL_RTL (exp);
6358
6359    case INTEGER_CST:
6360      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6361				 TREE_INT_CST_HIGH (exp), mode);
6362
6363      /* ??? If overflow is set, fold will have done an incomplete job,
6364	 which can result in (plus xx (const_int 0)), which can get
6365	 simplified by validate_replace_rtx during virtual register
6366	 instantiation, which can result in unrecognizable insns.
6367	 Avoid this by forcing all overflows into registers.  */
6368      if (TREE_CONSTANT_OVERFLOW (exp)
6369	  && modifier != EXPAND_INITIALIZER)
6370	temp = force_reg (mode, temp);
6371
6372      return temp;
6373
6374    case CONST_DECL:
6375      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6376
6377    case REAL_CST:
6378      /* If optimized, generate immediate CONST_DOUBLE
6379	 which will be turned into memory by reload if necessary.
6380
6381	 We used to force a register so that loop.c could see it.  But
6382	 this does not allow gen_* patterns to perform optimizations with
6383	 the constants.  It also produces two insns in cases like "x = 1.0;".
6384	 On most machines, floating-point constants are not permitted in
6385	 many insns, so we'd end up copying it to a register in any case.
6386
6387	 Now, we do the copying in expand_binop, if appropriate.  */
6388      return immed_real_const (exp);
6389
6390    case COMPLEX_CST:
6391    case STRING_CST:
6392      if (! TREE_CST_RTL (exp))
6393	output_constant_def (exp, 1);
6394
6395      /* TREE_CST_RTL probably contains a constant address.
6396	 On RISC machines where a constant address isn't valid,
6397	 make some insns to get that address into a register.  */
6398      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6399	  && modifier != EXPAND_CONST_ADDRESS
6400	  && modifier != EXPAND_INITIALIZER
6401	  && modifier != EXPAND_SUM
6402	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6403	      || (flag_force_addr
6404		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6405	return replace_equiv_address (TREE_CST_RTL (exp),
6406				      copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6407      return TREE_CST_RTL (exp);
6408
6409    case EXPR_WITH_FILE_LOCATION:
6410      {
6411	rtx to_return;
6412	const char *saved_input_filename = input_filename;
6413	int saved_lineno = lineno;
6414	input_filename = EXPR_WFL_FILENAME (exp);
6415	lineno = EXPR_WFL_LINENO (exp);
6416	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6417	  emit_line_note (input_filename, lineno);
6418	/* Possibly avoid switching back and forth here.  */
6419	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6420	input_filename = saved_input_filename;
6421	lineno = saved_lineno;
6422	return to_return;
6423      }
6424
6425    case SAVE_EXPR:
6426      context = decl_function_context (exp);
6427
6428      /* If this SAVE_EXPR was at global context, assume we are an
6429	 initialization function and move it into our context.  */
6430      if (context == 0)
6431	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6432
6433      /* We treat inline_function_decl as an alias for the current function
6434	 because that is the inline function whose vars, types, etc.
6435	 are being merged into the current function.
6436	 See expand_inline_function.  */
6437      if (context == current_function_decl || context == inline_function_decl)
6438	context = 0;
6439
6440      /* If this is non-local, handle it.  */
6441      if (context)
6442	{
6443	  /* The following call just exists to abort if the context is
6444	     not of a containing function.  */
6445	  find_function_data (context);
6446
6447	  temp = SAVE_EXPR_RTL (exp);
6448	  if (temp && GET_CODE (temp) == REG)
6449	    {
6450	      put_var_into_stack (exp);
6451	      temp = SAVE_EXPR_RTL (exp);
6452	    }
6453	  if (temp == 0 || GET_CODE (temp) != MEM)
6454	    abort ();
6455	  return
6456	    replace_equiv_address (temp,
6457				   fix_lexical_addr (XEXP (temp, 0), exp));
6458	}
6459      if (SAVE_EXPR_RTL (exp) == 0)
6460	{
6461	  if (mode == VOIDmode)
6462	    temp = const0_rtx;
6463	  else
6464	    temp = assign_temp (build_qualified_type (type,
6465						      (TYPE_QUALS (type)
6466						       | TYPE_QUAL_CONST)),
6467				3, 0, 0);
6468
6469	  SAVE_EXPR_RTL (exp) = temp;
6470	  if (!optimize && GET_CODE (temp) == REG)
6471	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6472						save_expr_regs);
6473
6474	  /* If the mode of TEMP does not match that of the expression, it
6475	     must be a promoted value.  We pass store_expr a SUBREG of the
6476	     wanted mode but mark it so that we know that it was already
6477	     extended.  Note that `unsignedp' was modified above in
6478	     this case.  */
6479
6480	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6481	    {
6482	      temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6483	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6484	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6485	    }
6486
6487	  if (temp == const0_rtx)
6488	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6489	  else
6490	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6491
6492	  TREE_USED (exp) = 1;
6493	}
6494
6495      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6496	 must be a promoted value.  We return a SUBREG of the wanted mode,
6497	 but mark it so that we know that it was already extended.  */
6498
6499      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6500	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6501	{
6502	  /* Compute the signedness and make the proper SUBREG.  */
6503	  promote_mode (type, mode, &unsignedp, 0);
6504	  temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6505	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6506	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6507	  return temp;
6508	}
6509
6510      return SAVE_EXPR_RTL (exp);
6511
6512    case UNSAVE_EXPR:
6513      {
6514	rtx temp;
6515	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6516	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6517	return temp;
6518      }
6519
6520    case PLACEHOLDER_EXPR:
6521      {
6522	tree old_list = placeholder_list;
6523	tree placeholder_expr = 0;
6524
6525	exp = find_placeholder (exp, &placeholder_expr);
6526	if (exp == 0)
6527	  abort ();
6528
6529	placeholder_list = TREE_CHAIN (placeholder_expr);
6530	temp = expand_expr (exp, original_target, tmode, modifier);
6531	placeholder_list = old_list;
6532	return temp;
6533      }
6534
6535      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6536      abort ();
6537
6538    case WITH_RECORD_EXPR:
6539      /* Put the object on the placeholder list, expand our first operand,
6540	 and pop the list.  */
6541      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6542				    placeholder_list);
6543      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6544			    modifier);
6545      placeholder_list = TREE_CHAIN (placeholder_list);
6546      return target;
6547
6548    case GOTO_EXPR:
6549      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6550	expand_goto (TREE_OPERAND (exp, 0));
6551      else
6552	expand_computed_goto (TREE_OPERAND (exp, 0));
6553      return const0_rtx;
6554
6555    case EXIT_EXPR:
6556      expand_exit_loop_if_false (NULL,
6557				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6558      return const0_rtx;
6559
6560    case LABELED_BLOCK_EXPR:
6561      if (LABELED_BLOCK_BODY (exp))
6562	expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6563      /* Should perhaps use expand_label, but this is simpler and safer.  */
6564      do_pending_stack_adjust ();
6565      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6566      return const0_rtx;
6567
6568    case EXIT_BLOCK_EXPR:
6569      if (EXIT_BLOCK_RETURN (exp))
6570	sorry ("returned value in block_exit_expr");
6571      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6572      return const0_rtx;
6573
6574    case LOOP_EXPR:
6575      push_temp_slots ();
6576      expand_start_loop (1);
6577      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6578      expand_end_loop ();
6579      pop_temp_slots ();
6580
6581      return const0_rtx;
6582
6583    case BIND_EXPR:
6584      {
6585	tree vars = TREE_OPERAND (exp, 0);
6586	int vars_need_expansion = 0;
6587
6588	/* Need to open a binding contour here because
6589	   if there are any cleanups they must be contained here.  */
6590	expand_start_bindings (2);
6591
6592	/* Mark the corresponding BLOCK for output in its proper place.  */
6593	if (TREE_OPERAND (exp, 2) != 0
6594	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6595	  insert_block (TREE_OPERAND (exp, 2));
6596
6597	/* If VARS have not yet been expanded, expand them now.  */
6598	while (vars)
6599	  {
6600	    if (!DECL_RTL_SET_P (vars))
6601	      {
6602		vars_need_expansion = 1;
6603		expand_decl (vars);
6604	      }
6605	    expand_decl_init (vars);
6606	    vars = TREE_CHAIN (vars);
6607	  }
6608
6609	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6610
6611	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6612
6613	return temp;
6614      }
6615
6616    case RTL_EXPR:
6617      if (RTL_EXPR_SEQUENCE (exp))
6618	{
6619	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6620	    abort ();
6621	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6622	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6623	}
6624      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6625      free_temps_for_rtl_expr (exp);
6626      return RTL_EXPR_RTL (exp);
6627
6628    case CONSTRUCTOR:
6629      /* If we don't need the result, just ensure we evaluate any
6630	 subexpressions.  */
6631      if (ignore)
6632	{
6633	  tree elt;
6634
6635	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6636	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6637
6638	  return const0_rtx;
6639	}
6640
6641      /* All elts simple constants => refer to a constant in memory.  But
6642	 if this is a non-BLKmode mode, let it store a field at a time
6643	 since that should make a CONST_INT or CONST_DOUBLE when we
6644	 fold.  Likewise, if we have a target we can use, it is best to
6645	 store directly into the target unless the type is large enough
6646	 that memcpy will be used.  If we are making an initializer and
6647	 all operands are constant, put it in memory as well.  */
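      /* Illustration: for a small aggregate whose type has a scalar mode,
	 say an SImode struct of two shorts, storing the fields one at a
	 time lets the pieces fold into a single CONST_INT, which is why
	 such constructors are not sent to memory here.  */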
6648      else if ((TREE_STATIC (exp)
6649		&& ((mode == BLKmode
6650		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6651		    || TREE_ADDRESSABLE (exp)
6652		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6653			&& (! MOVE_BY_PIECES_P
6654			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6655			     TYPE_ALIGN (type)))
6656			&& ! mostly_zeros_p (exp))))
6657	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6658	{
6659	  rtx constructor = output_constant_def (exp, 1);
6660
6661	  if (modifier != EXPAND_CONST_ADDRESS
6662	      && modifier != EXPAND_INITIALIZER
6663	      && modifier != EXPAND_SUM)
6664	    constructor = validize_mem (constructor);
6665
6666	  return constructor;
6667	}
6668      else
6669	{
6670	  /* Handle calls that pass values in multiple non-contiguous
6671	     locations.  The Irix 6 ABI has examples of this.  */
6672	  if (target == 0 || ! safe_from_p (target, exp, 1)
6673	      || GET_CODE (target) == PARALLEL)
6674	    target
6675	      = assign_temp (build_qualified_type (type,
6676						   (TYPE_QUALS (type)
6677						    | (TREE_READONLY (exp)
6678						       * TYPE_QUAL_CONST))),
6679			     0, TREE_ADDRESSABLE (exp), 1);
6680
6681	  store_constructor (exp, target, 0, int_expr_size (exp));
6682	  return target;
6683	}
6684
6685    case INDIRECT_REF:
6686      {
6687	tree exp1 = TREE_OPERAND (exp, 0);
6688	tree index;
6689	tree string = string_constant (exp1, &index);
6690
6691	/* Try to optimize reads from const strings.  */
6692 	if (string
6693 	    && TREE_CODE (string) == STRING_CST
6694 	    && TREE_CODE (index) == INTEGER_CST
6695	    && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6696 	    && GET_MODE_CLASS (mode) == MODE_INT
6697 	    && GET_MODE_SIZE (mode) == 1
6698	    && modifier != EXPAND_WRITE)
6699 	  return
6700	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (string)
6701					 [TREE_INT_CST_LOW (index)], mode));
6702
6703	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6704	op0 = memory_address (mode, op0);
6705	temp = gen_rtx_MEM (mode, op0);
6706	set_mem_attributes (temp, exp, 0);
6707
6708	/* If we are writing to this object and its type is a record with
6709	   readonly fields, we must mark it as readonly so it will
6710	   conflict with readonly references to those fields.  */
6711	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6712	  RTX_UNCHANGING_P (temp) = 1;
6713
6714	return temp;
6715      }
6716
6717    case ARRAY_REF:
6718      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6719	abort ();
6720
6721      {
6722	tree array = TREE_OPERAND (exp, 0);
6723	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6724	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6725	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6726	HOST_WIDE_INT i;
6727
6728	/* Optimize the special-case of a zero lower bound.
6729
6730	   We convert the low_bound to sizetype to avoid some problems
6731	   with constant folding.  (E.g. suppose the lower bound is 1,
6732	   and its mode is QI.  Without the conversion,  (ARRAY
6733	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6734	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
6735
6736	if (! integer_zerop (low_bound))
6737	  index = size_diffop (index, convert (sizetype, low_bound));
6738
6739	/* Fold an expression like: "foo"[2].
6740	   This is not done in fold so it won't happen inside &.
6741	   Don't fold if this is for wide characters since it's too
6742	   difficult to do correctly and this is a very rare case.  */
6743
6744	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6745	    && TREE_CODE (array) == STRING_CST
6746	    && TREE_CODE (index) == INTEGER_CST
6747	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6748	    && GET_MODE_CLASS (mode) == MODE_INT
6749	    && GET_MODE_SIZE (mode) == 1)
6750	  return
6751	    GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (array)
6752					 [TREE_INT_CST_LOW (index)], mode));
6753
6754	/* If this is a constant index into a constant array,
6755	   just get the value from the array.  Handle both the cases when
6756	   we have an explicit constructor and when our operand is a variable
6757	   that was declared const.  */
6758
6759	if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6760	    && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6761	    && TREE_CODE (index) == INTEGER_CST
6762	    && 0 > compare_tree_int (index,
6763				     list_length (CONSTRUCTOR_ELTS
6764						  (TREE_OPERAND (exp, 0)))))
6765	  {
6766	    tree elem;
6767
6768	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6769		 i = TREE_INT_CST_LOW (index);
6770		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6771	      ;
6772
6773	    if (elem)
6774	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6775				  modifier);
6776	  }
6777
6778	else if (optimize >= 1
6779		 && modifier != EXPAND_CONST_ADDRESS
6780		 && modifier != EXPAND_INITIALIZER
6781		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6782		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6783		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6784	  {
6785	    if (TREE_CODE (index) == INTEGER_CST)
6786	      {
6787		tree init = DECL_INITIAL (array);
6788
6789		if (TREE_CODE (init) == CONSTRUCTOR)
6790		  {
6791		    tree elem;
6792
6793		    for (elem = CONSTRUCTOR_ELTS (init);
6794			 (elem
6795			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6796			 elem = TREE_CHAIN (elem))
6797		      ;
6798
6799		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6800		      return expand_expr (fold (TREE_VALUE (elem)), target,
6801					  tmode, modifier);
6802		  }
6803		else if (TREE_CODE (init) == STRING_CST
6804			 && 0 > compare_tree_int (index,
6805						  TREE_STRING_LENGTH (init)))
6806		  {
6807		    tree type = TREE_TYPE (TREE_TYPE (init));
6808		    enum machine_mode mode = TYPE_MODE (type);
6809
6810		    if (GET_MODE_CLASS (mode) == MODE_INT
6811			&& GET_MODE_SIZE (mode) == 1)
6812		      return GEN_INT (trunc_int_for_mode
6813				      (TREE_STRING_POINTER (init)
6814				       [TREE_INT_CST_LOW (index)], mode));
6815		  }
6816	      }
6817	  }
6818      }
6819      /* Fall through.  */
6820
6821    case COMPONENT_REF:
6822    case BIT_FIELD_REF:
6823    case ARRAY_RANGE_REF:
6824      /* If the operand is a CONSTRUCTOR, we can just extract the
6825	 appropriate field if it is present.  Don't do this if we have
6826	 already written the data since we want to refer to that copy
6827	 and varasm.c assumes that's what we'll do.  */
6828      if (code == COMPONENT_REF
6829	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6830	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6831	{
6832	  tree elt;
6833
6834	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6835	       elt = TREE_CHAIN (elt))
6836	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6837		/* We can normally use the value of the field in the
6838		   CONSTRUCTOR.  However, if this is a bitfield in
6839		   an integral mode that we can fit in a HOST_WIDE_INT,
6840		   we must mask only the number of bits in the bitfield,
6841		   since this is done implicitly by the constructor.  If
6842		   the bitfield does not meet either of those conditions,
6843		   we can't do this optimization.  */
6844		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6845		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6846			 == MODE_INT)
6847			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6848			    <= HOST_BITS_PER_WIDE_INT))))
6849	      {
6850		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6851		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6852		  {
6853		    HOST_WIDE_INT bitsize
6854		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6855		    enum machine_mode imode
6856		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6857
6858		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6859		      {
6860			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6861			op0 = expand_and (imode, op0, op1, target);
6862		      }
6863		    else
6864		      {
6865			tree count
6866			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6867					 0);
6868
6869			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6870					    target, 0);
6871			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6872					    target, 0);
6873		      }
6874		  }
6875
6876		return op0;
6877	      }
6878	}
6879
6880      {
6881	enum machine_mode mode1;
6882	HOST_WIDE_INT bitsize, bitpos;
6883	tree offset;
6884	int volatilep = 0;
6885	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6886					&mode1, &unsignedp, &volatilep);
6887	rtx orig_op0;
6888
6889	/* If we got back the original object, something is wrong.  Perhaps
6890	   we are evaluating an expression too early.  In any event, don't
6891	   infinitely recurse.  */
6892	if (tem == exp)
6893	  abort ();
6894
6895	/* If TEM's type is a union of variable size, pass TARGET to the inner
6896	   computation, since it will need a temporary and TARGET is known
6897	   to suffice.  This occurs in unchecked conversion in Ada.  */
6898
6899	orig_op0 = op0
6900	  = expand_expr (tem,
6901			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6902			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6903			      != INTEGER_CST)
6904			  ? target : NULL_RTX),
6905			 VOIDmode,
6906			 (modifier == EXPAND_INITIALIZER
6907			  || modifier == EXPAND_CONST_ADDRESS)
6908			 ? modifier : EXPAND_NORMAL);
6909
6910	/* If this is a constant, put it into a register if it is a
6911	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6912	if (CONSTANT_P (op0))
6913	  {
6914	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6915	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6916		&& offset == 0)
6917	      op0 = force_reg (mode, op0);
6918	    else
6919	      op0 = validize_mem (force_const_mem (mode, op0));
6920	  }
6921
6922	if (offset != 0)
6923	  {
6924	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6925
6926	    /* If this object is in a register, put it into memory.
6927	       This case can't occur in C, but can in Ada if we have
6928	       unchecked conversion of an expression from a scalar type to
6929	       an array or record type.  */
6930	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6931		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6932	      {
6933		/* If the operand is a SAVE_EXPR, we can deal with this by
6934		   forcing the SAVE_EXPR into memory.  */
6935		if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6936		  {
6937		    put_var_into_stack (TREE_OPERAND (exp, 0));
6938		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6939		  }
6940		else
6941		  {
6942		    tree nt
6943		      = build_qualified_type (TREE_TYPE (tem),
6944					      (TYPE_QUALS (TREE_TYPE (tem))
6945					       | TYPE_QUAL_CONST));
6946		    rtx memloc = assign_temp (nt, 1, 1, 1);
6947
6948		    emit_move_insn (memloc, op0);
6949		    op0 = memloc;
6950		  }
6951	      }
6952
6953	    if (GET_CODE (op0) != MEM)
6954	      abort ();
6955
6956#ifdef POINTERS_EXTEND_UNSIGNED
6957	    if (GET_MODE (offset_rtx) != Pmode)
6958	      offset_rtx = convert_memory_address (Pmode, offset_rtx);
6959#else
6960	    if (GET_MODE (offset_rtx) != ptr_mode)
6961	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6962#endif
6963
6964	    /* A constant address in OP0 can have VOIDmode; we must not try
6965	       to call force_reg in that case, so avoid it.  */
6966	    if (GET_CODE (op0) == MEM
6967		&& GET_MODE (op0) == BLKmode
6968		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6969		&& bitsize != 0
6970		&& (bitpos % bitsize) == 0
6971		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6972		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6973	      {
6974		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6975		bitpos = 0;
6976	      }
6977
6978	    op0 = offset_address (op0, offset_rtx,
6979				  highest_pow2_factor (offset));
6980	  }
6981
6982	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6983	   record its alignment as BIGGEST_ALIGNMENT.  */
6984	if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6985	    && is_aligning_offset (offset, tem))
6986	  set_mem_align (op0, BIGGEST_ALIGNMENT);
6987
6988	/* Don't forget about volatility even if this is a bitfield.  */
6989	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6990	  {
6991	    if (op0 == orig_op0)
6992	      op0 = copy_rtx (op0);
6993
6994	    MEM_VOLATILE_P (op0) = 1;
6995	  }
6996
6997	/* The following code doesn't handle CONCAT.
6998	   Assume only bitpos == 0 can be used for CONCAT, due to
6999	   one-element arrays having the same mode as their element.  */
7000	if (GET_CODE (op0) == CONCAT)
7001	  {
7002	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7003	      abort ();
7004	    return op0;
7005	  }
7006
7007	/* In cases where an aligned union has an unaligned object
7008	   as a field, we might be extracting a BLKmode value from
7009	   an integer-mode (e.g., SImode) object.  Handle this case
7010	   by doing the extract into an object as wide as the field
7011	   (which we know to be the width of a basic mode), then
7012	   storing into memory, and changing the mode to BLKmode.  */
7013	if (mode1 == VOIDmode
7014	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7015	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
7016		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7017		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7018		&& modifier != EXPAND_CONST_ADDRESS
7019		&& modifier != EXPAND_INITIALIZER)
7020	    /* If the field isn't aligned enough to fetch as a memref,
7021	       fetch it as a bit field.  */
7022	    || (mode1 != BLKmode
7023		&& SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7024		&& ((TYPE_ALIGN (TREE_TYPE (tem))
7025		     < GET_MODE_ALIGNMENT (mode))
7026		    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7027	    /* If the type and the field are a constant size and the
7028	       size of the type isn't the same size as the bitfield,
7029	       we must use bitfield operations.  */
7030	    || (bitsize >= 0
7031		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7032		    == INTEGER_CST)
7033		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7034					  bitsize)))
7035	  {
7036	    enum machine_mode ext_mode = mode;
7037
7038	    if (ext_mode == BLKmode
7039		&& ! (target != 0 && GET_CODE (op0) == MEM
7040		      && GET_CODE (target) == MEM
7041		      && bitpos % BITS_PER_UNIT == 0))
7042	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7043
7044	    if (ext_mode == BLKmode)
7045	      {
7046		/* In this case, BITPOS must start at a byte boundary and
7047		   TARGET, if specified, must be a MEM.  */
7048		if (GET_CODE (op0) != MEM
7049		    || (target != 0 && GET_CODE (target) != MEM)
7050		    || bitpos % BITS_PER_UNIT != 0)
7051		  abort ();
7052
7053		op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7054		if (target == 0)
7055		  target = assign_temp (type, 0, 1, 1);
7056
7057		emit_block_move (target, op0,
7058				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7059					  / BITS_PER_UNIT));
7060
7061		return target;
7062	      }
7063
7064	    op0 = validize_mem (op0);
7065
7066	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7067	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7068
7069	    op0 = extract_bit_field (op0, bitsize, bitpos,
7070				     unsignedp, target, ext_mode, ext_mode,
7071				     int_size_in_bytes (TREE_TYPE (tem)));
7072
7073	    /* If the result is a record type and BITSIZE is narrower than
7074	       the mode of OP0, an integral mode, and this is a big endian
7075	       machine, we must put the field into the high-order bits.  */
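	    /* For example, a 24-bit field extracted into an SImode value
	       on a big-endian target is shifted left by 8 bits here so
	       that it occupies the high-order bytes of the word, matching
	       the in-memory layout of the record.  */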
7076	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7077		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7078		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7079	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7080				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7081					    - bitsize),
7082				  op0, 1);
7083
7084	    if (mode == BLKmode)
7085	      {
7086		rtx new = assign_temp (build_qualified_type
7087				       (type_for_mode (ext_mode, 0),
7088					TYPE_QUAL_CONST), 0, 1, 1);
7089
7090		emit_move_insn (new, op0);
7091		op0 = copy_rtx (new);
7092		PUT_MODE (op0, BLKmode);
7093		set_mem_attributes (op0, exp, 1);
7094	      }
7095
7096	    return op0;
7097	  }
7098
7099	/* If the result is BLKmode, use that to access the object
7100	   now as well.  */
7101	if (mode == BLKmode)
7102	  mode1 = BLKmode;
7103
7104	/* Get a reference to just this component.  */
7105	if (modifier == EXPAND_CONST_ADDRESS
7106	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7107	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7108	else
7109	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7110
7111	if (op0 == orig_op0)
7112	  op0 = copy_rtx (op0);
7113
7114	set_mem_attributes (op0, exp, 0);
7115	if (GET_CODE (XEXP (op0, 0)) == REG)
7116	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7117
7118	MEM_VOLATILE_P (op0) |= volatilep;
7119	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7120	    || modifier == EXPAND_CONST_ADDRESS
7121	    || modifier == EXPAND_INITIALIZER)
7122	  return op0;
7123	else if (target == 0)
7124	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7125
7126	convert_move (target, op0, unsignedp);
7127	return target;
7128      }
7129
7130    case VTABLE_REF:
7131      {
7132	rtx insn, before = get_last_insn (), vtbl_ref;
7133
7134	/* Evaluate the interior expression.  */
7135	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7136				 tmode, modifier);
7137
7138	/* Get or create an instruction off which to hang a note.  */
7139	if (REG_P (subtarget))
7140	  {
7141	    target = subtarget;
7142	    insn = get_last_insn ();
7143	    if (insn == before)
7144	      abort ();
7145	    if (! INSN_P (insn))
7146	      insn = prev_nonnote_insn (insn);
7147	  }
7148	else
7149	  {
7150	    target = gen_reg_rtx (GET_MODE (subtarget));
7151	    insn = emit_move_insn (target, subtarget);
7152	  }
7153
7154	/* Collect the data for the note.  */
7155	vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7156	vtbl_ref = plus_constant (vtbl_ref,
7157				  tree_low_cst (TREE_OPERAND (exp, 2), 0));
7158	/* Discard the initial CONST that was added.  */
7159	vtbl_ref = XEXP (vtbl_ref, 0);
7160
7161	REG_NOTES (insn)
7162	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7163
7164	return target;
7165      }
7166
7167      /* Intended for a reference to a buffer of a file-object in Pascal.
7168	 But it's not certain that a special tree code will really be
7169	 necessary for these.  INDIRECT_REF might work for them.  */
7170    case BUFFER_REF:
7171      abort ();
7172
7173    case IN_EXPR:
7174      {
7175	/* Pascal set IN expression.
7176
7177	   Algorithm:
7178	       rlo       = set_low - (set_low%bits_per_word);
7179	       the_word  = set [ (index - rlo)/bits_per_word ];
7180	       bit_index = index % bits_per_word;
7181	       bitmask   = 1 << bit_index;
7182	       return !!(the_word & bitmask);  */
7183
7184	tree set = TREE_OPERAND (exp, 0);
7185	tree index = TREE_OPERAND (exp, 1);
7186	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7187	tree set_type = TREE_TYPE (set);
7188	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7189	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7190	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7191	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7192	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7193	rtx setval = expand_expr (set, 0, VOIDmode, 0);
7194	rtx setaddr = XEXP (setval, 0);
7195	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7196	rtx rlow;
7197	rtx diff, quo, rem, addr, bit, result;
7198
7199	/* If domain is empty, answer is no.  Likewise if index is constant
7200	   and out of bounds.  */
7201	if (((TREE_CODE (set_high_bound) == INTEGER_CST
7202	     && TREE_CODE (set_low_bound) == INTEGER_CST
7203	     && tree_int_cst_lt (set_high_bound, set_low_bound))
7204	     || (TREE_CODE (index) == INTEGER_CST
7205		 && TREE_CODE (set_low_bound) == INTEGER_CST
7206		 && tree_int_cst_lt (index, set_low_bound))
7207	     || (TREE_CODE (set_high_bound) == INTEGER_CST
7208		 && TREE_CODE (index) == INTEGER_CST
7209		 && tree_int_cst_lt (set_high_bound, index))))
7210	  return const0_rtx;
7211
7212	if (target == 0)
7213	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7214
7215	/* If we get here, we have to generate the code for both cases
7216	   (in range and out of range).  */
7217
7218	op0 = gen_label_rtx ();
7219	op1 = gen_label_rtx ();
7220
7221	if (! (GET_CODE (index_val) == CONST_INT
7222	       && GET_CODE (lo_r) == CONST_INT))
7223	  emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7224				   GET_MODE (index_val), iunsignedp, op1);
7225
7226	if (! (GET_CODE (index_val) == CONST_INT
7227	       && GET_CODE (hi_r) == CONST_INT))
7228	  emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7229				   GET_MODE (index_val), iunsignedp, op1);
7230
7231	/* Calculate the element number of bit zero in the first word
7232	   of the set.  */
7233	if (GET_CODE (lo_r) == CONST_INT)
7234	  rlow = GEN_INT (INTVAL (lo_r)
7235			  & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7236	else
7237	  rlow = expand_binop (index_mode, and_optab, lo_r,
7238			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7239			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7240
7241	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7242			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7243
7244	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7245			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7246	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7247			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7248
7249	addr = memory_address (byte_mode,
7250			       expand_binop (index_mode, add_optab, diff,
7251					     setaddr, NULL_RTX, iunsignedp,
7252					     OPTAB_LIB_WIDEN));
7253
7254	/* Extract the bit we want to examine.  */
7255	bit = expand_shift (RSHIFT_EXPR, byte_mode,
7256			    gen_rtx_MEM (byte_mode, addr),
7257			    make_tree (TREE_TYPE (index), rem),
7258			    NULL_RTX, 1);
7259	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7260			       GET_MODE (target) == byte_mode ? target : 0,
7261			       1, OPTAB_LIB_WIDEN);
7262
7263	if (result != target)
7264	  convert_move (target, result, 1);
7265
7266	/* Output the code to handle the out-of-range case.  */
7267	emit_jump (op0);
7268	emit_label (op1);
7269	emit_move_insn (target, const0_rtx);
7270	emit_label (op0);
7271	return target;
7272      }
7273
7274    case WITH_CLEANUP_EXPR:
7275      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7276	{
7277	  WITH_CLEANUP_EXPR_RTL (exp)
7278	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7279	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7280
7281	  /* That's it for this cleanup.  */
7282	  TREE_OPERAND (exp, 1) = 0;
7283	}
7284      return WITH_CLEANUP_EXPR_RTL (exp);
7285
7286    case CLEANUP_POINT_EXPR:
7287      {
7288	/* Start a new binding layer that will keep track of all cleanup
7289	   actions to be performed.  */
7290	expand_start_bindings (2);
7291
7292	target_temp_slot_level = temp_slot_level;
7293
7294	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7295	/* If we're going to use this value, load it up now.  */
7296	if (! ignore)
7297	  op0 = force_not_mem (op0);
7298	preserve_temp_slots (op0);
7299	expand_end_bindings (NULL_TREE, 0, 0);
7300      }
7301      return op0;
7302
7303    case CALL_EXPR:
7304      /* Check for a built-in function.  */
7305      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7306	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7307	      == FUNCTION_DECL)
7308	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7309        {
7310	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7311	      == BUILT_IN_FRONTEND)
7312	    return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7313	  else
7314	    return expand_builtin (exp, target, subtarget, tmode, ignore);
7315	}
7316
7317      return expand_call (exp, target, ignore);
7318
7319    case NON_LVALUE_EXPR:
7320    case NOP_EXPR:
7321    case CONVERT_EXPR:
7322    case REFERENCE_EXPR:
7323      if (TREE_OPERAND (exp, 0) == error_mark_node)
7324	return const0_rtx;
7325
7326      if (TREE_CODE (type) == UNION_TYPE)
7327	{
7328	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7329
7330	  /* If both input and output are BLKmode, this conversion isn't doing
7331	     anything except possibly changing memory attributes.  */
7332	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7333	    {
7334	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7335					modifier);
7336
7337	      result = copy_rtx (result);
7338	      set_mem_attributes (result, exp, 0);
7339	      return result;
7340	    }
7341
7342	  if (target == 0)
7343	    target = assign_temp (type, 0, 1, 1);
7344
7345	  if (GET_CODE (target) == MEM)
7346	    /* Store data into beginning of memory target.  */
7347	    store_expr (TREE_OPERAND (exp, 0),
7348			adjust_address (target, TYPE_MODE (valtype), 0), 0);
7349
7350	  else if (GET_CODE (target) == REG)
7351	    /* Store this field into a union of the proper type.  */
7352	    store_field (target,
7353			 MIN ((int_size_in_bytes (TREE_TYPE
7354						  (TREE_OPERAND (exp, 0)))
7355			       * BITS_PER_UNIT),
7356			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7357			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7358			 VOIDmode, 0, type, 0);
7359	  else
7360	    abort ();
7361
7362	  /* Return the entire union.  */
7363	  return target;
7364	}
7365
7366      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7367	{
7368	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7369			     modifier);
7370
7371	  /* If the signedness of the conversion differs and OP0 is
7372	     a promoted SUBREG, clear that indication since we now
7373	     have to do the proper extension.  */
7374	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7375	      && GET_CODE (op0) == SUBREG)
7376	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7377
7378	  return op0;
7379	}
7380
7381      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7382      if (GET_MODE (op0) == mode)
7383	return op0;
7384
7385      /* If OP0 is a constant, just convert it into the proper mode.  */
7386      if (CONSTANT_P (op0))
7387	{
7388	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7389	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7390
7391          if (modifier == EXPAND_INITIALIZER)
7392	    return simplify_gen_subreg (mode, op0, inner_mode,
7393					subreg_lowpart_offset (mode,
7394							       inner_mode));
7395	  else
7396	    return convert_modes (mode, inner_mode, op0,
7397				  TREE_UNSIGNED (inner_type));
7398	}
7399
7400      if (modifier == EXPAND_INITIALIZER)
7401	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7402
7403      if (target == 0)
7404	return
7405	  convert_to_mode (mode, op0,
7406			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7407      else
7408	convert_move (target, op0,
7409		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7410      return target;
7411
7412    case VIEW_CONVERT_EXPR:
7413      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7414
7415      /* If the input and output modes are both the same, we are done.
7416	 Otherwise, if neither mode is BLKmode and both are within a word, we
7417	 can use gen_lowpart.  If neither is true, make sure the operand is
7418	 in memory and convert the MEM to the new mode.  */
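      /* For instance, reinterpreting a 32-bit float as a 32-bit integer
	 (SFmode and SImode, both within a word) takes the gen_lowpart
	 path below; otherwise the operand is accessed through memory.  */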
7419      if (TYPE_MODE (type) == GET_MODE (op0))
7420	;
7421      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7422	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7423	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7424	op0 = gen_lowpart (TYPE_MODE (type), op0);
7425      else if (GET_CODE (op0) != MEM)
7426	{
7427	  /* If the operand is not a MEM, force it into memory.  Since we
7428	     are going to be changing the mode of the MEM, don't call
7429	     force_const_mem for constants because we don't allow pool
7430	     constants to change mode.  */
7431	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7432
7433	  if (TREE_ADDRESSABLE (exp))
7434	    abort ();
7435
7436	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7437	    target
7438	      = assign_stack_temp_for_type
7439		(TYPE_MODE (inner_type),
7440		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7441
7442	  emit_move_insn (target, op0);
7443	  op0 = target;
7444	}
7445
7446      /* At this point, OP0 is in the correct mode.  If the output type is such
7447	 that the operand is known to be aligned, indicate that it is.
7448	 Otherwise, we need only be concerned about alignment for non-BLKmode
7449	 results.  */
7450      if (GET_CODE (op0) == MEM)
7451	{
7452	  op0 = copy_rtx (op0);
7453
7454	  if (TYPE_ALIGN_OK (type))
7455	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7456	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7457		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7458	    {
7459	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7460	      HOST_WIDE_INT temp_size
7461		= MAX (int_size_in_bytes (inner_type),
7462		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7463	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7464						    temp_size, 0, type);
7465	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7466
7467	      if (TREE_ADDRESSABLE (exp))
7468		abort ();
7469
7470	      if (GET_MODE (op0) == BLKmode)
7471		emit_block_move (new_with_op0_mode, op0,
7472				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7473	      else
7474		emit_move_insn (new_with_op0_mode, op0);
7475
7476	      op0 = new;
7477	    }
7478
7479	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
7480	}
7481
7482      return op0;
7483
7484    case PLUS_EXPR:
7485      /* We come here from MINUS_EXPR when the second operand is a
7486         constant.  */
7487    plus_expr:
7488      this_optab = ! unsignedp && flag_trapv
7489                   && (GET_MODE_CLASS (mode) == MODE_INT)
7490                   ? addv_optab : add_optab;
7491
7492      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7493	 something else, make sure we add the register to the constant and
7494	 then to the other thing.  This case can occur during strength
7495	 reduction and doing it this way will produce better code if the
7496	 frame pointer or argument pointer is eliminated.
7497
7498	 fold-const.c will ensure that the constant is always in the inner
7499	 PLUS_EXPR, so the only case we need to do anything about is if
7500	 sp, ap, or fp is our second argument, in which case we must swap
7501	 the innermost first argument and our second argument.  */
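      /* For example, (X + CST) + FP is rearranged here into (FP + CST) + X,
	 so that FP + CST can later collapse into a single address once the
	 frame pointer is eliminated.  */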
7502
7503      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7504	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7505	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7506	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7507	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7508	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7509	{
7510	  tree t = TREE_OPERAND (exp, 1);
7511
7512	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7513	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7514	}
7515
7516      /* If the result is to be ptr_mode and we are adding an integer to
7517	 something, we might be forming a constant.  So try to use
7518	 plus_constant.  If it produces a sum and we can't accept it,
7519	 use force_operand.  This allows P = &ARR[const] to generate
7520	 efficient code on machines where a SYMBOL_REF is not a valid
7521	 address.
7522
7523	 If this is an EXPAND_SUM call, always return the sum.  */
7524      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7525          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7526	{
7527	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7528	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7529	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7530	    {
7531	      rtx constant_part;
7532
7533	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7534				 EXPAND_SUM);
7535	      /* Use immed_double_const to ensure that the constant is
7536		 truncated according to the mode of OP1, then sign extended
7537		 to a HOST_WIDE_INT.  Using the constant directly can result
7538		 in non-canonical RTL in a 64x32 cross compile.  */
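	      /* E.g. on a 64-bit host compiling for a 32-bit target, the
		 SImode constant 0xffffffff must be represented as the
		 canonical CONST_INT -1, not as 0xffffffff.  */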
7539	      constant_part
7540		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7541				      (HOST_WIDE_INT) 0,
7542				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7543	      op1 = plus_constant (op1, INTVAL (constant_part));
7544	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7545		op1 = force_operand (op1, target);
7546	      return op1;
7547	    }
7548
7549	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7550		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7551		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7552	    {
7553	      rtx constant_part;
7554
7555	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7556				 (modifier == EXPAND_INITIALIZER
7557				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7558	      if (! CONSTANT_P (op0))
7559		{
7560		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7561				     VOIDmode, modifier);
7562		  /* Don't go to both_summands if modifier
7563		     says it's not right to return a PLUS.  */
7564		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7565		    goto binop2;
7566		  goto both_summands;
7567		}
7568	      /* Use immed_double_const to ensure that the constant is
7569		 truncated according to the mode of OP0, then sign extended
7570		 to a HOST_WIDE_INT.  Using the constant directly can result
7571		 in non-canonical RTL in a 64x32 cross compile.  */
7572	      constant_part
7573		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7574				      (HOST_WIDE_INT) 0,
7575				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7576	      op0 = plus_constant (op0, INTVAL (constant_part));
7577	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7578		op0 = force_operand (op0, target);
7579	      return op0;
7580	    }
7581	}
7582
7583      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7584	subtarget = 0;
7585
7586      /* No sense saving up arithmetic to be done
7587	 if it's all in the wrong mode to form part of an address.
7588	 And force_operand won't know whether to sign-extend or
7589	 zero-extend.  */
7590      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7591	  || mode != ptr_mode)
7592	{
7593	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7594	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7595	  if (op0 == const0_rtx)
7596	    return op1;
7597	  if (op1 == const0_rtx)
7598	    return op0;
7599	  goto binop2;
7600	}
7601
7602      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7603      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7604
7605    both_summands:
7606      /* Make sure any term that's a sum with a constant comes last.  */
7607      if (GET_CODE (op0) == PLUS
7608	  && CONSTANT_P (XEXP (op0, 1)))
7609	{
7610	  temp = op0;
7611	  op0 = op1;
7612	  op1 = temp;
7613	}
7614      /* If adding to a sum including a constant,
7615	 associate it to put the constant outside.  */
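      /* For example, if OP0 is (plus REG2 4) and OP1 is (plus REG1 8),
	 the code below gathers the constants so the result is roughly
	 (plus (plus REG1 REG2) 12), with the single constant outermost.  */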
7616      if (GET_CODE (op1) == PLUS
7617	  && CONSTANT_P (XEXP (op1, 1)))
7618	{
7619	  rtx constant_term = const0_rtx;
7620
7621	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7622	  if (temp != 0)
7623	    op0 = temp;
7624	  /* Ensure that MULT comes first if there is one.  */
7625	  else if (GET_CODE (op0) == MULT)
7626	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7627	  else
7628	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7629
7630	  /* Let's also eliminate constants from op0 if possible.  */
7631	  op0 = eliminate_constant_term (op0, &constant_term);
7632
7633	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7634	     their sum should be a constant.  Form it into OP1, since the
7635	     result we want will then be OP0 + OP1.  */
7636
7637	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7638					    XEXP (op1, 1));
7639	  if (temp != 0)
7640	    op1 = temp;
7641	  else
7642	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7643	}
7644
7645      /* Put a constant term last and put a multiplication first.  */
7646      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7647	temp = op1, op1 = op0, op0 = temp;
7648
7649      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7650      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7651
7652    case MINUS_EXPR:
7653      /* For initializers, we are allowed to return a MINUS of two
7654	 symbolic constants.  Here we handle all cases when both operands
7655	 are constant.  */
7656      /* Handle difference of two symbolic constants,
7657	 for the sake of an initializer.  */
7658      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7659	  && really_constant_p (TREE_OPERAND (exp, 0))
7660	  && really_constant_p (TREE_OPERAND (exp, 1)))
7661	{
7662	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7663				 modifier);
7664	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7665				 modifier);
7666
7667	  /* If the last operand is a CONST_INT, use plus_constant of
7668	     the negated constant.  Else make the MINUS.  */
7669	  if (GET_CODE (op1) == CONST_INT)
7670	    return plus_constant (op0, - INTVAL (op1));
7671	  else
7672	    return gen_rtx_MINUS (mode, op0, op1);
7673	}
7674      /* Convert A - const to A + (-const).  */
7675      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7676	{
7677	  tree negated = fold (build1 (NEGATE_EXPR, type,
7678				       TREE_OPERAND (exp, 1)));
7679
7680	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7681	    /* If we can't negate the constant in TYPE, leave it alone and
7682	       expand_binop will negate it for us.  We used to try to do it
7683	       here in the signed version of TYPE, but that doesn't work
7684	       on POINTER_TYPEs.  */;
7685	  else
7686	    {
7687	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7688	      goto plus_expr;
7689	    }
7690	}
7691      this_optab = ! unsignedp && flag_trapv
7692                   && (GET_MODE_CLASS(mode) == MODE_INT)
7693                   ? subv_optab : sub_optab;
7694      goto binop;
7695
7696    case MULT_EXPR:
7697      /* If first operand is constant, swap them.
7698	 Thus the following special case checks need only
7699	 check the second operand.  */
7700      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7701	{
7702	  tree t1 = TREE_OPERAND (exp, 0);
7703	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7704	  TREE_OPERAND (exp, 1) = t1;
7705	}
7706
7707      /* Attempt to return something suitable for generating an
7708	 indexed address, for machines that support that.  */
7709
7710      if (modifier == EXPAND_SUM && mode == ptr_mode
7711	  && host_integerp (TREE_OPERAND (exp, 1), 0))
7712	{
7713	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7714			     EXPAND_SUM);
7715
7716	  /* If we knew for certain that this is arithmetic for an array
7717	     reference, and we knew the bounds of the array, then we could
7718	     apply the distributive law across (PLUS X C) for constant C.
7719	     Without such knowledge, we risk overflowing the computation
7720	     when both X and C are large, but X+C isn't.  */
7721	  /* ??? Could perhaps special-case EXP being unsigned and C being
7722	     positive.  In that case we are certain that X+C is no smaller
7723	     than X and so the transformed expression will overflow iff the
7724	     original would have.  */
7725
7726	  if (GET_CODE (op0) != REG)
7727	    op0 = force_operand (op0, NULL_RTX);
7728	  if (GET_CODE (op0) != REG)
7729	    op0 = copy_to_mode_reg (mode, op0);
7730
7731	  return
7732	    gen_rtx_MULT (mode, op0,
7733			  GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7734	}
7735
7736      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7737	subtarget = 0;
7738
7739      /* Check for multiplying things that have been extended
7740	 from a narrower type.  If this machine supports multiplying
7741	 in that narrower type with a result in the desired type,
7742	 do it that way, and avoid the explicit type-conversion.  */
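      /* For instance, with 16-bit shorts and 32-bit ints,
	 (int) s1 * (int) s2 can use a single widening HImode->SImode
	 multiply (a mulhisi3-style pattern) on targets that provide one,
	 rather than two extensions followed by a full SImode multiply.  */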
7743      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7744	  && TREE_CODE (type) == INTEGER_TYPE
7745	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7746	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7747	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7748	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7749				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7750	       /* Don't use a widening multiply if a shift will do.  */
7751	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7752		    > HOST_BITS_PER_WIDE_INT)
7753		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7754	      ||
7755	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7756	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7757		   ==
7758		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7759	       /* If both operands are extended, they must either both
7760		  be zero-extended or both be sign-extended.  */
7761	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7762		   ==
7763		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7764	{
7765	  enum machine_mode innermode
7766	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7767	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7768			? smul_widen_optab : umul_widen_optab);
7769	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7770			? umul_widen_optab : smul_widen_optab);
7771	  if (mode == GET_MODE_WIDER_MODE (innermode))
7772	    {
7773	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7774		{
7775		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7776				     NULL_RTX, VOIDmode, 0);
7777		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7778		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7779				       VOIDmode, 0);
7780		  else
7781		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7782				       NULL_RTX, VOIDmode, 0);
7783		  goto binop2;
7784		}
7785	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7786		       && innermode == word_mode)
7787		{
7788		  rtx htem;
7789		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7790				     NULL_RTX, VOIDmode, 0);
7791		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7792		    op1 = convert_modes (innermode, mode,
7793					 expand_expr (TREE_OPERAND (exp, 1),
7794						      NULL_RTX, VOIDmode, 0),
7795					 unsignedp);
7796		  else
7797		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7798				       NULL_RTX, VOIDmode, 0);
7799		  temp = expand_binop (mode, other_optab, op0, op1, target,
7800				       unsignedp, OPTAB_LIB_WIDEN);
7801		  htem = expand_mult_highpart_adjust (innermode,
7802						      gen_highpart (innermode, temp),
7803						      op0, op1,
7804						      gen_highpart (innermode, temp),
7805						      unsignedp);
7806		  emit_move_insn (gen_highpart (innermode, temp), htem);
7807		  return temp;
7808		}
7809	    }
7810	}
7811      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7812      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7813      return expand_mult (mode, op0, op1, target, unsignedp);
7814
7815    case TRUNC_DIV_EXPR:
7816    case FLOOR_DIV_EXPR:
7817    case CEIL_DIV_EXPR:
7818    case ROUND_DIV_EXPR:
7819    case EXACT_DIV_EXPR:
7820      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7821	subtarget = 0;
7822      /* Possible optimization: compute the dividend with EXPAND_SUM;
7823	 then, if the divisor is constant, we can optimize the case
7824	 where some terms of the dividend have coefficients divisible by it.  */
7825      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7826      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7827      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7828
7829    case RDIV_EXPR:
7830      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7831         saving an expensive divide.  If not, combine will rebuild the original
7832         computation.  */
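      /* E.g. a/b and c/b both become multiplications by the same (1/b),
	 so a later CSE pass need compute the reciprocal only once.  */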
7833      if (flag_unsafe_math_optimizations && optimize && !optimize_size
7834	  && TREE_CODE (type) == REAL_TYPE
7835	  && !real_onep (TREE_OPERAND (exp, 0)))
7836        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7837				   build (RDIV_EXPR, type,
7838					  build_real (type, dconst1),
7839					  TREE_OPERAND (exp, 1))),
7840			    target, tmode, modifier);
7841      this_optab = sdiv_optab;
7842      goto binop;
7843
7844    case TRUNC_MOD_EXPR:
7845    case FLOOR_MOD_EXPR:
7846    case CEIL_MOD_EXPR:
7847    case ROUND_MOD_EXPR:
7848      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7849	subtarget = 0;
7850      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7851      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7852      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7853
7854    case FIX_ROUND_EXPR:
7855    case FIX_FLOOR_EXPR:
7856    case FIX_CEIL_EXPR:
7857      abort ();			/* Not used for C.  */
7858
7859    case FIX_TRUNC_EXPR:
7860      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7861      if (target == 0)
7862	target = gen_reg_rtx (mode);
7863      expand_fix (target, op0, unsignedp);
7864      return target;
7865
7866    case FLOAT_EXPR:
7867      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7868      if (target == 0)
7869	target = gen_reg_rtx (mode);
7870      /* expand_float can't figure out what to do if FROM has VOIDmode.
7871	 So give it the correct mode.  With -O, cse will optimize this.  */
7872      if (GET_MODE (op0) == VOIDmode)
7873	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7874				op0);
7875      expand_float (target, op0,
7876		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7877      return target;
7878
7879    case NEGATE_EXPR:
7880      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7881      temp = expand_unop (mode,
7882                          ! unsignedp && flag_trapv
7883                          && (GET_MODE_CLASS(mode) == MODE_INT)
7884                          ? negv_optab : neg_optab, op0, target, 0);
7885      if (temp == 0)
7886	abort ();
7887      return temp;
7888
7889    case ABS_EXPR:
7890      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7891
7892      /* Handle complex values specially.  */
7893      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7894	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7895	return expand_complex_abs (mode, op0, target, unsignedp);
7896
7897      /* Unsigned abs is simply the operand.  Testing here means we don't
7898	 risk generating incorrect code below.  */
7899      if (TREE_UNSIGNED (type))
7900	return op0;
7901
7902      return expand_abs (mode, op0, target, unsignedp,
7903			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7904
7905    case MAX_EXPR:
7906    case MIN_EXPR:
7907      target = original_target;
7908      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7909	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7910	  || GET_MODE (target) != mode
7911	  || (GET_CODE (target) == REG
7912	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7913	target = gen_reg_rtx (mode);
7914      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7915      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7916
7917      /* First try to do it with a special MIN or MAX instruction.
7918	 If that does not win, use a conditional jump to select the proper
7919	 value.  */
7920      this_optab = (TREE_UNSIGNED (type)
7921		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7922		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7923
7924      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7925			   OPTAB_WIDEN);
7926      if (temp != 0)
7927	return temp;
7928
7929      /* At this point, a MEM target is no longer useful; we will get better
7930	 code without it.  */
7931
7932      if (GET_CODE (target) == MEM)
7933	target = gen_reg_rtx (mode);
7934
7935      if (target != op0)
7936	emit_move_insn (target, op0);
7937
7938      op0 = gen_label_rtx ();
7939
7940      /* If this mode is an integer too wide to compare properly,
7941	 compare word by word.  Rely on cse to optimize constant cases.  */
7942      if (GET_MODE_CLASS (mode) == MODE_INT
7943	  && ! can_compare_p (GE, mode, ccp_jump))
7944	{
7945	  if (code == MAX_EXPR)
7946	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7947					  target, op1, NULL_RTX, op0);
7948	  else
7949	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7950					  op1, target, NULL_RTX, op0);
7951	}
7952      else
7953	{
7954	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7955	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7956				   unsignedp, mode, NULL_RTX, NULL_RTX,
7957				   op0);
7958	}
7959      emit_move_insn (target, op1);
7960      emit_label (op0);
7961      return target;
7962
7963    case BIT_NOT_EXPR:
7964      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7965      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7966      if (temp == 0)
7967	abort ();
7968      return temp;
7969
7970    case FFS_EXPR:
7971      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7972      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7973      if (temp == 0)
7974	abort ();
7975      return temp;
7976
7977      /* ??? Can optimize bitwise operations with one arg constant.
7978	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7979	 and (a bitwise1 b) bitwise2 b (etc)
7980	 but that is probably not worthwhile.  */
7981
7982      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7983	 boolean values when we want in all cases to compute both of them.  In
7984	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7985	 as actual zero-or-1 values and then bitwise anding.  In cases where
7986	 there cannot be any side effects, better code would be made by
7987	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7988	 how to recognize those cases.  */
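      /* For instance, TRUTH_ANDIF_EXPR ("a && b" in C) must not evaluate
	 B when A is zero, whereas TRUTH_AND_EXPR may evaluate both of its
	 0-or-1 operands unconditionally and simply AND them.  */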
7989
7990    case TRUTH_AND_EXPR:
7991    case BIT_AND_EXPR:
7992      this_optab = and_optab;
7993      goto binop;
7994
7995    case TRUTH_OR_EXPR:
7996    case BIT_IOR_EXPR:
7997      this_optab = ior_optab;
7998      goto binop;
7999
8000    case TRUTH_XOR_EXPR:
8001    case BIT_XOR_EXPR:
8002      this_optab = xor_optab;
8003      goto binop;
8004
8005    case LSHIFT_EXPR:
8006    case RSHIFT_EXPR:
8007    case LROTATE_EXPR:
8008    case RROTATE_EXPR:
8009      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8010	subtarget = 0;
8011      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8012      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8013			   unsignedp);
8014
8015      /* Could determine the answer when only additive constants differ.  Also,
8016	 the addition of one can be handled by changing the condition.  */
8017    case LT_EXPR:
8018    case LE_EXPR:
8019    case GT_EXPR:
8020    case GE_EXPR:
8021    case EQ_EXPR:
8022    case NE_EXPR:
8023    case UNORDERED_EXPR:
8024    case ORDERED_EXPR:
8025    case UNLT_EXPR:
8026    case UNLE_EXPR:
8027    case UNGT_EXPR:
8028    case UNGE_EXPR:
8029    case UNEQ_EXPR:
8030      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8031      if (temp != 0)
8032	return temp;
8033
8034      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
8035      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8036	  && original_target
8037	  && GET_CODE (original_target) == REG
8038	  && (GET_MODE (original_target)
8039	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8040	{
8041	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8042			      VOIDmode, 0);
8043
8044	  /* If temp is constant, we can just compute the result.  */
8045	  if (GET_CODE (temp) == CONST_INT)
8046	    {
8047	      if (INTVAL (temp) != 0)
8048	        emit_move_insn (target, const1_rtx);
8049	      else
8050	        emit_move_insn (target, const0_rtx);
8051
8052	      return target;
8053	    }
8054
8055	  if (temp != original_target)
8056	    {
8057	      enum machine_mode mode1 = GET_MODE (temp);
8058	      if (mode1 == VOIDmode)
8059		mode1 = tmode != VOIDmode ? tmode : mode;
8060
8061	      temp = copy_to_mode_reg (mode1, temp);
8062	    }
8063
8064	  op1 = gen_label_rtx ();
8065	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8066				   GET_MODE (temp), unsignedp, op1);
8067	  emit_move_insn (temp, const1_rtx);
8068	  emit_label (op1);
8069	  return temp;
8070	}
8071
8072      /* If no set-flag instruction, must generate a conditional
8073	 store into a temporary variable.  Drop through
8074	 and handle this like && and ||.  */
8075
8076    case TRUTH_ANDIF_EXPR:
8077    case TRUTH_ORIF_EXPR:
8078      if (! ignore
8079	  && (target == 0 || ! safe_from_p (target, exp, 1)
8080	      /* Make sure we don't have a hard reg (such as function's return
8081		 value) live across basic blocks, if not optimizing.  */
8082	      || (!optimize && GET_CODE (target) == REG
8083		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8084	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8085
8086      if (target)
8087	emit_clr_insn (target);
8088
8089      op1 = gen_label_rtx ();
8090      jumpifnot (exp, op1);
8091
8092      if (target)
8093	emit_0_to_1_insn (target);
8094
8095      emit_label (op1);
8096      return ignore ? const0_rtx : target;
8097
8098    case TRUTH_NOT_EXPR:
8099      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8100      /* The parser is careful to generate TRUTH_NOT_EXPR
8101	 only with operands that are always zero or one.  */
8102      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8103			   target, 1, OPTAB_LIB_WIDEN);
8104      if (temp == 0)
8105	abort ();
8106      return temp;
8107
8108    case COMPOUND_EXPR:
8109      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8110      emit_queue ();
8111      return expand_expr (TREE_OPERAND (exp, 1),
8112			  (ignore ? const0_rtx : target),
8113			  VOIDmode, 0);
8114
8115    case COND_EXPR:
8116      /* If we would have a "singleton" (see below) were it not for a
8117	 conversion in each arm, bring that conversion back out.  */
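      /* For instance, `x ? (long) (a + b) : (long) a' is rewritten as
	 `(long) (x ? a + b : a)' so the singleton code below can see it.  */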
8118      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8119	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8120	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8121	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8122	{
8123	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8124	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8125
8126	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8127	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8128	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8129		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8130	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8131		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8132	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8133		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8134	    return expand_expr (build1 (NOP_EXPR, type,
8135					build (COND_EXPR, TREE_TYPE (iftrue),
8136					       TREE_OPERAND (exp, 0),
8137					       iftrue, iffalse)),
8138				target, tmode, modifier);
8139	}
8140
8141      {
8142	/* Note that COND_EXPRs whose type is a structure or union
8143	   are required to be constructed to contain assignments of
8144	   a temporary variable, so that we can evaluate them here
8145	   for side effect only.  If type is void, we must do likewise.  */
8146
8147	/* If an arm of the branch requires a cleanup,
8148	   only that cleanup is performed.  */
8149
8150	tree singleton = 0;
8151	tree binary_op = 0, unary_op = 0;
8152
8153	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8154	   convert it to our mode, if necessary.  */
8155	if (integer_onep (TREE_OPERAND (exp, 1))
8156	    && integer_zerop (TREE_OPERAND (exp, 2))
8157	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8158	  {
8159	    if (ignore)
8160	      {
8161		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8162			     modifier);
8163		return const0_rtx;
8164	      }
8165
8166	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8167	    if (GET_MODE (op0) == mode)
8168	      return op0;
8169
8170	    if (target == 0)
8171	      target = gen_reg_rtx (mode);
8172	    convert_move (target, op0, unsignedp);
8173	    return target;
8174	  }
8175
8176	/* Check for X ? A + B : A.  If we have this, we can copy A to the
8177	   output and conditionally add B.  Similarly for unary operations.
8178	   Don't do this if X has side-effects because those side effects
8179	   might affect A or B and the "?" operation is a sequence point in
8180	   ANSI.  (operand_equal_p tests for side effects.)  */
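	/* For example, `x ? a + b : a' sets SINGLETON to A and BINARY_OP to
	   A + B, while `x ? -a : a' sets SINGLETON to A and UNARY_OP to -A.  */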
8181
8182	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8183	    && operand_equal_p (TREE_OPERAND (exp, 2),
8184				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8185	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8186	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8187		 && operand_equal_p (TREE_OPERAND (exp, 1),
8188				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8189	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8190	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8191		 && operand_equal_p (TREE_OPERAND (exp, 2),
8192				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8193	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8194	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8195		 && operand_equal_p (TREE_OPERAND (exp, 1),
8196				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8197	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8198
8199	/* If we are not to produce a result, we have no target.  Otherwise,
8200	   if a target was specified use it; it will not be used as an
8201	   intermediate target unless it is safe.  If no target, use a
8202	   temporary.  */
8203
8204	if (ignore)
8205	  temp = 0;
8206	else if (original_target
8207		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8208		     || (singleton && GET_CODE (original_target) == REG
8209			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8210			 && original_target == var_rtx (singleton)))
8211		 && GET_MODE (original_target) == mode
8212#ifdef HAVE_conditional_move
8213		 && (! can_conditionally_move_p (mode)
8214		     || GET_CODE (original_target) == REG
8215		     || TREE_ADDRESSABLE (type))
8216#endif
8217		 && (GET_CODE (original_target) != MEM
8218		     || TREE_ADDRESSABLE (type)))
8219	  temp = original_target;
8220	else if (TREE_ADDRESSABLE (type))
8221	  abort ();
8222	else
8223	  temp = assign_temp (type, 0, 0, 1);
8224
8225	/* If we had X ? A + C : A, with C a constant power of 2, and we can
8226	   do the test of X as a store-flag operation, do this as
8227	   A + ((X != 0) << log C).  Similarly for other simple binary
8228	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
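	/* For instance, `x ? a + 8 : a' (with X a comparison and BRANCH_COST
	   permitting) is expanded as `a + ((x != 0) << 3)', with no branch.  */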
8229	if (temp && singleton && binary_op
8230	    && (TREE_CODE (binary_op) == PLUS_EXPR
8231		|| TREE_CODE (binary_op) == MINUS_EXPR
8232		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
8233		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
8234	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8235		: integer_onep (TREE_OPERAND (binary_op, 1)))
8236	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8237	  {
8238	    rtx result;
8239	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8240                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8241                               ? addv_optab : add_optab)
8242                            : TREE_CODE (binary_op) == MINUS_EXPR
8243                              ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8244                                 ? subv_optab : sub_optab)
8245                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8246                            : xor_optab);
8247
8248	    /* If we had X ? A : A + 1, do this as A + (X == 0).
8249
8250	       We have to invert the truth value here and then put it
8251	       back later if do_store_flag fails.  We cannot simply copy
8252	       TREE_OPERAND (exp, 0) to another variable and modify that
8253	       because invert_truthvalue can modify the tree pointed to
8254	       by its argument.  */
8255	    if (singleton == TREE_OPERAND (exp, 1))
8256	      TREE_OPERAND (exp, 0)
8257		= invert_truthvalue (TREE_OPERAND (exp, 0));
8258
8259	    result = do_store_flag (TREE_OPERAND (exp, 0),
8260				    (safe_from_p (temp, singleton, 1)
8261				     ? temp : NULL_RTX),
8262				    mode, BRANCH_COST <= 1);
8263
8264	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8265	      result = expand_shift (LSHIFT_EXPR, mode, result,
8266				     build_int_2 (tree_log2
8267						  (TREE_OPERAND
8268						   (binary_op, 1)),
8269						  0),
8270				     (safe_from_p (temp, singleton, 1)
8271				      ? temp : NULL_RTX), 0);
8272
8273	    if (result)
8274	      {
8275		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8276		return expand_binop (mode, boptab, op1, result, temp,
8277				     unsignedp, OPTAB_LIB_WIDEN);
8278	      }
8279	    else if (singleton == TREE_OPERAND (exp, 1))
8280	      TREE_OPERAND (exp, 0)
8281		= invert_truthvalue (TREE_OPERAND (exp, 0));
8282	  }
8283
8284	do_pending_stack_adjust ();
8285	NO_DEFER_POP;
8286	op0 = gen_label_rtx ();
8287
8288	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8289	  {
8290	    if (temp != 0)
8291	      {
8292		/* If the target conflicts with the other operand of the
8293		   binary op, we can't use it.  Also, we can't use the target
8294		   if it is a hard register, because evaluating the condition
8295		   might clobber it.  */
8296		if ((binary_op
8297		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8298		    || (GET_CODE (temp) == REG
8299			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
8300		  temp = gen_reg_rtx (mode);
8301		store_expr (singleton, temp, 0);
8302	      }
8303	    else
8304	      expand_expr (singleton,
8305			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8306	    if (singleton == TREE_OPERAND (exp, 1))
8307	      jumpif (TREE_OPERAND (exp, 0), op0);
8308	    else
8309	      jumpifnot (TREE_OPERAND (exp, 0), op0);
8310
8311	    start_cleanup_deferral ();
8312	    if (binary_op && temp == 0)
8313	      /* Just touch the other operand.  */
8314	      expand_expr (TREE_OPERAND (binary_op, 1),
8315			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8316	    else if (binary_op)
8317	      store_expr (build (TREE_CODE (binary_op), type,
8318				 make_tree (type, temp),
8319				 TREE_OPERAND (binary_op, 1)),
8320			  temp, 0);
8321	    else
8322	      store_expr (build1 (TREE_CODE (unary_op), type,
8323				  make_tree (type, temp)),
8324			  temp, 0);
8325	    op1 = op0;
8326	  }
8327	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8328	   comparison operator.  If we have one of these cases, set the
8329	   output to A, branch on A (cse will merge these two references),
8330	   then set the output to FOO.  */
8331	else if (temp
8332		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8333		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8334		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8335				     TREE_OPERAND (exp, 1), 0)
8336		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8337		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8338		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8339	  {
8340	    if (GET_CODE (temp) == REG
8341		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8342	      temp = gen_reg_rtx (mode);
8343	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8344	    jumpif (TREE_OPERAND (exp, 0), op0);
8345
8346	    start_cleanup_deferral ();
8347	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8348	    op1 = op0;
8349	  }
8350	else if (temp
8351		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8352		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8353		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8354				     TREE_OPERAND (exp, 2), 0)
8355		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8356		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8357		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8358	  {
8359	    if (GET_CODE (temp) == REG
8360		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
8361	      temp = gen_reg_rtx (mode);
8362	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
8363	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8364
8365	    start_cleanup_deferral ();
8366	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
8367	    op1 = op0;
8368	  }
8369	else
8370	  {
8371	    op1 = gen_label_rtx ();
8372	    jumpifnot (TREE_OPERAND (exp, 0), op0);
8373
8374	    start_cleanup_deferral ();
8375
8376	    /* One branch of the cond can be void, if it never returns. For
8377	       example A ? throw : E  */
8378	    if (temp != 0
8379		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8380	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
8381	    else
8382	      expand_expr (TREE_OPERAND (exp, 1),
8383			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8384	    end_cleanup_deferral ();
8385	    emit_queue ();
8386	    emit_jump_insn (gen_jump (op1));
8387	    emit_barrier ();
8388	    emit_label (op0);
8389	    start_cleanup_deferral ();
8390	    if (temp != 0
8391		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8392	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
8393	    else
8394	      expand_expr (TREE_OPERAND (exp, 2),
8395			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8396	  }
8397
8398	end_cleanup_deferral ();
8399
8400	emit_queue ();
8401	emit_label (op1);
8402	OK_DEFER_POP;
8403
8404	return temp;
8405      }
8406
8407    case TARGET_EXPR:
8408      {
8409	/* Something needs to be initialized, but we didn't know
8410	   where that thing was when building the tree.  For example,
8411	   it could be the return value of a function, or a parameter
8412	   it could be the return value of a function, or a parameter
8413	   to a function which is laid out on the stack, or a temporary
8414
8415	   We guarantee that the expression will either be constructed
8416	   or copied into our original target.  */
8417
8418	tree slot = TREE_OPERAND (exp, 0);
8419	tree cleanups = NULL_TREE;
8420	tree exp1;
8421
8422	if (TREE_CODE (slot) != VAR_DECL)
8423	  abort ();
8424
8425	if (! ignore)
8426	  target = original_target;
8427
8428	/* Set this here so that if we get a target that refers to a
8429	   register variable that's already been used, put_reg_into_stack
8430	   knows that it should fix up those uses.  */
8431	TREE_USED (slot) = 1;
8432
8433	if (target == 0)
8434	  {
8435	    if (DECL_RTL_SET_P (slot))
8436	      {
8437		target = DECL_RTL (slot);
8438		/* If we have already expanded the slot, don't do
8439		   it again.  (mrs)  */
8440		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8441		  return target;
8442	      }
8443	    else
8444	      {
8445		target = assign_temp (type, 2, 0, 1);
8446		/* All temp slots at this level must not conflict.  */
8447		preserve_temp_slots (target);
8448		SET_DECL_RTL (slot, target);
8449		if (TREE_ADDRESSABLE (slot))
8450		  put_var_into_stack (slot);
8451
8452		/* Since SLOT is not known to the called function
8453		   to belong to its stack frame, we must build an explicit
8454		   cleanup.  This case occurs when we must build up a reference
8455		   to pass the reference as an argument.  In this case,
8456		   it is very likely that such a reference need not be
8457		   built here.  */
8458
8459		if (TREE_OPERAND (exp, 2) == 0)
8460		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8461		cleanups = TREE_OPERAND (exp, 2);
8462	      }
8463	  }
8464	else
8465	  {
8466	    /* This case does occur when expanding a parameter which
8467	       needs to be constructed on the stack.  The target
8468	       is the actual stack address that we want to initialize.
8469	       The function we call will perform the cleanup in this case.  */
8470
8471	    /* If we have already assigned it space, use that space,
8472	       not the target that we were passed in, as our target
8473	       parameter is only a hint.  */
8474	    if (DECL_RTL_SET_P (slot))
8475	      {
8476		target = DECL_RTL (slot);
8477		/* If we have already expanded the slot, don't do
8478                   it again.  (mrs)  */
8479		if (TREE_OPERAND (exp, 1) == NULL_TREE)
8480		  return target;
8481	      }
8482	    else
8483	      {
8484		SET_DECL_RTL (slot, target);
8485		/* If we must have an addressable slot, then make sure that
8486		   the RTL that we just stored in slot is OK.  */
8487		if (TREE_ADDRESSABLE (slot))
8488		  put_var_into_stack (slot);
8489	      }
8490	  }
8491
8492	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8493	/* Mark it as expanded.  */
8494	TREE_OPERAND (exp, 1) = NULL_TREE;
8495
8496	store_expr (exp1, target, 0);
8497
8498	expand_decl_cleanup (NULL_TREE, cleanups);
8499
8500	return target;
8501      }
8502
8503    case INIT_EXPR:
8504      {
8505	tree lhs = TREE_OPERAND (exp, 0);
8506	tree rhs = TREE_OPERAND (exp, 1);
8507
8508	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8509	return temp;
8510      }
8511
8512    case MODIFY_EXPR:
8513      {
8514	/* If lhs is complex, expand calls in rhs before computing it.
8515	   That's so we don't compute a pointer and save it over a
8516	   call.  If lhs is simple, compute it first so we can give it
8517	   as a target if the rhs is just a call.  This avoids an
8518	   extra temp and copy and that prevents a partial-subsumption
8519	   which makes bad code.  Actually we could treat
8520	   component_ref's of vars like vars.  */
8521
8522	tree lhs = TREE_OPERAND (exp, 0);
8523	tree rhs = TREE_OPERAND (exp, 1);
8524
8525	temp = 0;
8526
8527	/* Check for |= or &= of a bitfield of size one into another bitfield
8528	   of size 1.  In this case, (unless we need the result of the
8529	   assignment) we can do this more efficiently with a
8530	   test followed by an assignment, if necessary.
8531
8532	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8533	   things change so we do, this code should be enhanced to
8534	   support it.  */
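	/* For example, with one-bit fields and the result unused,
	   `s.a |= s.b' becomes `if (s.b) s.a = 1;' and
	   `s.a &= s.b' becomes `if (! s.b) s.a = 0;'.  */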
8535	if (ignore
8536	    && TREE_CODE (lhs) == COMPONENT_REF
8537	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
8538		|| TREE_CODE (rhs) == BIT_AND_EXPR)
8539	    && TREE_OPERAND (rhs, 0) == lhs
8540	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8541	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8542	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8543	  {
8544	    rtx label = gen_label_rtx ();
8545
8546	    do_jump (TREE_OPERAND (rhs, 1),
8547		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8548		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8549	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8550					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8551					      ? integer_one_node
8552					      : integer_zero_node)),
8553			       0, 0);
8554	    do_pending_stack_adjust ();
8555	    emit_label (label);
8556	    return const0_rtx;
8557	  }
8558
8559	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8560
8561	return temp;
8562      }
8563
8564    case RETURN_EXPR:
8565      if (!TREE_OPERAND (exp, 0))
8566	expand_null_return ();
8567      else
8568	expand_return (TREE_OPERAND (exp, 0));
8569      return const0_rtx;
8570
8571    case PREINCREMENT_EXPR:
8572    case PREDECREMENT_EXPR:
8573      return expand_increment (exp, 0, ignore);
8574
8575    case POSTINCREMENT_EXPR:
8576    case POSTDECREMENT_EXPR:
8577      /* Faster to treat as pre-increment if result is not used.  */
8578      return expand_increment (exp, ! ignore, ignore);
8579
8580    case ADDR_EXPR:
8581      /* Are we taking the address of a nested function?  */
8582      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8583	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8584	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8585	  && ! TREE_STATIC (exp))
8586	{
8587	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8588	  op0 = force_operand (op0, target);
8589	}
8590      /* If we are taking the address of something erroneous, just
8591	 return a zero.  */
8592      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8593	return const0_rtx;
8594      /* If we are taking the address of a constant and are at the
8595	 top level, we have to use output_constant_def since we can't
8596	 call force_const_mem at top level.  */
8597      else if (cfun == 0
8598	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8599		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8600		       == 'c')))
8601	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8602      else
8603	{
8604	  /* We make sure to pass const0_rtx down if we came in with
8605	     ignore set, to avoid doing the cleanups twice for something.  */
8606	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8607			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8608			     (modifier == EXPAND_INITIALIZER
8609			      ? modifier : EXPAND_CONST_ADDRESS));
8610
8611	  /* If we are going to ignore the result, OP0 will have been set
8612	     to const0_rtx, so just return it.  Don't get confused and
8613	     think we are taking the address of the constant.  */
8614	  if (ignore)
8615	    return op0;
8616
8617	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8618	     clever and returns a REG when given a MEM.  */
8619	  op0 = protect_from_queue (op0, 1);
8620
8621	  /* We would like the object in memory.  If it is a constant, we can
8622	     have it be statically allocated into memory.  For a non-constant,
8623	     we need to allocate some memory and store the value into it.  */
8624
8625	  if (CONSTANT_P (op0))
8626	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8627				   op0);
8628	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8629		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8630		   || GET_CODE (op0) == PARALLEL)
8631	    {
8632	      /* If the operand is a SAVE_EXPR, we can deal with this by
8633		 forcing the SAVE_EXPR into memory.  */
8634	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8635		{
8636		  put_var_into_stack (TREE_OPERAND (exp, 0));
8637		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8638		}
8639	      else
8640		{
8641		  /* If this object is in a register, it can't be BLKmode.  */
8642		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8643		  rtx memloc = assign_temp (inner_type, 1, 1, 1);
8644
8645		  if (GET_CODE (op0) == PARALLEL)
8646		    /* Handle calls that pass values in multiple
8647		       non-contiguous locations.  The Irix 6 ABI has examples
8648		       of this.  */
8649		    emit_group_store (memloc, op0,
8650				      int_size_in_bytes (inner_type));
8651		  else
8652		    emit_move_insn (memloc, op0);
8653
8654		  op0 = memloc;
8655		}
8656	    }
8657
8658	  if (GET_CODE (op0) != MEM)
8659	    abort ();
8660
8661	  mark_temp_addr_taken (op0);
8662	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8663	    {
8664	      op0 = XEXP (op0, 0);
8665#ifdef POINTERS_EXTEND_UNSIGNED
8666	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8667		  && mode == ptr_mode)
8668		op0 = convert_memory_address (ptr_mode, op0);
8669#endif
8670	      return op0;
8671	    }
8672
8673	  /* If OP0 is not aligned at least as much as the type requires, we
8674	     need to make a temporary, copy OP0 to it, and take the address of
8675	     the temporary.  We want to use the alignment of the type, not of
8676	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
8677	     the test for BLKmode means that can't happen.  The test for
8678	     BLKmode is because we never make mis-aligned MEMs with
8679	     non-BLKmode.
8680
8681	     We don't need to do this at all if the machine doesn't have
8682	     strict alignment.  */
8683	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8684	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8685		  > MEM_ALIGN (op0))
8686	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8687	    {
8688	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8689	      rtx new
8690		= assign_stack_temp_for_type
8691		  (TYPE_MODE (inner_type),
8692		   MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8693		   : int_size_in_bytes (inner_type),
8694		   1, build_qualified_type (inner_type,
8695					    (TYPE_QUALS (inner_type)
8696					     | TYPE_QUAL_CONST)));
8697
8698	      if (TYPE_ALIGN_OK (inner_type))
8699		abort ();
8700
8701	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8702	      op0 = new;
8703	    }
8704
8705	  op0 = force_operand (XEXP (op0, 0), target);
8706	}
8707
8708      if (flag_force_addr
8709	  && GET_CODE (op0) != REG
8710	  && modifier != EXPAND_CONST_ADDRESS
8711	  && modifier != EXPAND_INITIALIZER
8712	  && modifier != EXPAND_SUM)
8713	op0 = force_reg (Pmode, op0);
8714
8715      if (GET_CODE (op0) == REG
8716	  && ! REG_USERVAR_P (op0))
8717	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8718
8719#ifdef POINTERS_EXTEND_UNSIGNED
8720      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8721	  && mode == ptr_mode)
8722	op0 = convert_memory_address (ptr_mode, op0);
8723#endif
8724
8725      return op0;
8726
8727    case ENTRY_VALUE_EXPR:
8728      abort ();
8729
8730    /* COMPLEX type for Extended Pascal & Fortran  */
8731    case COMPLEX_EXPR:
8732      {
8733	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8734	rtx insns;
8735
8736	/* Get the rtx of the operands.  */
8737	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8738	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8739
8740	if (! target)
8741	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8742
8743	start_sequence ();
8744
8745	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8746	emit_move_insn (gen_realpart (mode, target), op0);
8747	emit_move_insn (gen_imagpart (mode, target), op1);
8748
8749	insns = get_insns ();
8750	end_sequence ();
8751
8752	/* Complex construction should appear as a single unit.  */
8753	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8754	   each with a separate pseudo as destination.
8755	   It's not correct for flow to treat them as a unit.  */
8756	if (GET_CODE (target) != CONCAT)
8757	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8758	else
8759	  emit_insns (insns);
8760
8761	return target;
8762      }
8763
8764    case REALPART_EXPR:
8765      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766      return gen_realpart (mode, op0);
8767
8768    case IMAGPART_EXPR:
8769      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8770      return gen_imagpart (mode, op0);
8771
8772    case CONJ_EXPR:
8773      {
8774	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8775	rtx imag_t;
8776	rtx insns;
8777
8778	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8779
8780	if (! target)
8781	  target = gen_reg_rtx (mode);
8782
8783	start_sequence ();
8784
8785	/* Store the realpart and the negated imagpart to target.  */
8786	emit_move_insn (gen_realpart (partmode, target),
8787			gen_realpart (partmode, op0));
8788
8789	imag_t = gen_imagpart (partmode, target);
8790	temp = expand_unop (partmode,
8791                            ! unsignedp && flag_trapv
8792                            && (GET_MODE_CLASS(partmode) == MODE_INT)
8793                            ? negv_optab : neg_optab,
8794			    gen_imagpart (partmode, op0), imag_t, 0);
8795	if (temp != imag_t)
8796	  emit_move_insn (imag_t, temp);
8797
8798	insns = get_insns ();
8799	end_sequence ();
8800
8801	/* Conjugate should appear as a single unit.
8802	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8803	   each with a separate pseudo as destination.
8804	   It's not correct for flow to treat them as a unit.  */
8805	if (GET_CODE (target) != CONCAT)
8806	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8807	else
8808	  emit_insns (insns);
8809
8810	return target;
8811      }
8812
8813    case TRY_CATCH_EXPR:
8814      {
8815	tree handler = TREE_OPERAND (exp, 1);
8816
8817	expand_eh_region_start ();
8818
8819	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8820
8821	expand_eh_region_end_cleanup (handler);
8822
8823	return op0;
8824      }
8825
8826    case TRY_FINALLY_EXPR:
8827      {
8828	tree try_block = TREE_OPERAND (exp, 0);
8829	tree finally_block = TREE_OPERAND (exp, 1);
8830	rtx finally_label = gen_label_rtx ();
8831	rtx done_label = gen_label_rtx ();
8832	rtx return_link = gen_reg_rtx (Pmode);
8833	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8834			      (tree) finally_label, (tree) return_link);
8835	TREE_SIDE_EFFECTS (cleanup) = 1;
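	/* The cleanup "calls" the FINALLY code as a subroutine: it loads the
	   return address into RETURN_LINK and jumps to FINALLY_LABEL, and the
	   FINALLY code jumps back through RETURN_LINK when it is done.  */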
8836
8837	/* Start a new binding layer that will keep track of all cleanup
8838	   actions to be performed.  */
8839	expand_start_bindings (2);
8840
8841	target_temp_slot_level = temp_slot_level;
8842
8843	expand_decl_cleanup (NULL_TREE, cleanup);
8844	op0 = expand_expr (try_block, target, tmode, modifier);
8845
8846	preserve_temp_slots (op0);
8847	expand_end_bindings (NULL_TREE, 0, 0);
8848	emit_jump (done_label);
8849	emit_label (finally_label);
8850	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8851	emit_indirect_jump (return_link);
8852	emit_label (done_label);
8853	return op0;
8854      }
8855
8856    case GOTO_SUBROUTINE_EXPR:
8857      {
8858	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8859	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8860	rtx return_address = gen_label_rtx ();
8861	emit_move_insn (return_link,
8862			gen_rtx_LABEL_REF (Pmode, return_address));
8863	emit_jump (subr);
8864	emit_label (return_address);
8865	return const0_rtx;
8866      }
8867
8868    case VA_ARG_EXPR:
8869      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8870
8871    case EXC_PTR_EXPR:
8872      return get_exception_pointer (cfun);
8873
8874    case FDESC_EXPR:
8875      /* Function descriptors are not valid except as
8876	 initialization constants, and should not be expanded.  */
8877      abort ();
8878
8879    default:
8880      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8881    }
8882
8883  /* Here to do an ordinary binary operator, generating an instruction
8884     from the optab already placed in `this_optab'.  */
8885 binop:
8886  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8887    subtarget = 0;
8888  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8889  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8890 binop2:
8891  temp = expand_binop (mode, this_optab, op0, op1, target,
8892		       unsignedp, OPTAB_LIB_WIDEN);
8893  if (temp == 0)
8894    abort ();
8895  return temp;
8896}
8897
8898/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8899   when applied to the address of EXP produces an address known to be
8900   aligned more than BIGGEST_ALIGNMENT.  */
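/* The offset recognized has the form `(- <address of EXP>) & MASK', where
   MASK is one less than a power of 2 and larger than BIGGEST_ALIGNMENT;
   that value is the amount needed to round the address of EXP up to the
   next MASK + 1 boundary.  */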
8901
8902static int
8903is_aligning_offset (offset, exp)
8904     tree offset;
8905     tree exp;
8906{
8907  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
8908  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8909	 || TREE_CODE (offset) == NOP_EXPR
8910	 || TREE_CODE (offset) == CONVERT_EXPR
8911	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8912    offset = TREE_OPERAND (offset, 0);
8913
8914  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8915     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8916  if (TREE_CODE (offset) != BIT_AND_EXPR
8917      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8918      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8919      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8920    return 0;
8921
8922  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8923     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8924  offset = TREE_OPERAND (offset, 0);
8925  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8926	 || TREE_CODE (offset) == NOP_EXPR
8927	 || TREE_CODE (offset) == CONVERT_EXPR)
8928    offset = TREE_OPERAND (offset, 0);
8929
8930  if (TREE_CODE (offset) != NEGATE_EXPR)
8931    return 0;
8932
8933  offset = TREE_OPERAND (offset, 0);
8934  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8935	 || TREE_CODE (offset) == NOP_EXPR
8936	 || TREE_CODE (offset) == CONVERT_EXPR)
8937    offset = TREE_OPERAND (offset, 0);
8938
8939  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8940     whose type is the same as EXP.  */
8941  return (TREE_CODE (offset) == ADDR_EXPR
8942	  && (TREE_OPERAND (offset, 0) == exp
8943	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8944		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
8945		      == TREE_TYPE (exp)))));
8946}
8947
8948/* Return the tree node if ARG corresponds to a string constant or zero
8949   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
8950   in bytes within the string that ARG is accessing.  The type of the
8951   offset will be `sizetype'.  */
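/* This handles ARG of the form `&"string"' directly, and also
   `&"string" + N' with the operands of the PLUS_EXPR in either order,
   setting *PTR_OFFSET from the non-address operand.  */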
8952
8953tree
8954string_constant (arg, ptr_offset)
8955     tree arg;
8956     tree *ptr_offset;
8957{
8958  STRIP_NOPS (arg);
8959
8960  if (TREE_CODE (arg) == ADDR_EXPR
8961      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8962    {
8963      *ptr_offset = size_zero_node;
8964      return TREE_OPERAND (arg, 0);
8965    }
8966  else if (TREE_CODE (arg) == PLUS_EXPR)
8967    {
8968      tree arg0 = TREE_OPERAND (arg, 0);
8969      tree arg1 = TREE_OPERAND (arg, 1);
8970
8971      STRIP_NOPS (arg0);
8972      STRIP_NOPS (arg1);
8973
8974      if (TREE_CODE (arg0) == ADDR_EXPR
8975	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8976	{
8977	  *ptr_offset = convert (sizetype, arg1);
8978	  return TREE_OPERAND (arg0, 0);
8979	}
8980      else if (TREE_CODE (arg1) == ADDR_EXPR
8981	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8982	{
8983	  *ptr_offset = convert (sizetype, arg0);
8984	  return TREE_OPERAND (arg1, 0);
8985	}
8986    }
8987
8988  return 0;
8989}
8990
8991/* Expand code for a post- or pre- increment or decrement
8992   and return the RTX for the result.
8993   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
8994
8995static rtx
8996expand_increment (exp, post, ignore)
8997     tree exp;
8998     int post, ignore;
8999{
9000  rtx op0, op1;
9001  rtx temp, value;
9002  tree incremented = TREE_OPERAND (exp, 0);
9003  optab this_optab = add_optab;
9004  int icode;
9005  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9006  int op0_is_copy = 0;
9007  int single_insn = 0;
9008  /* 1 means we can't store into OP0 directly,
9009     because it is a subreg narrower than a word,
9010     and we don't dare clobber the rest of the word.  */
9011  int bad_subreg = 0;
9012
9013  /* Stabilize any component ref that might need to be
9014     evaluated more than once below.  */
9015  if (!post
9016      || TREE_CODE (incremented) == BIT_FIELD_REF
9017      || (TREE_CODE (incremented) == COMPONENT_REF
9018	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9019	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9020    incremented = stabilize_reference (incremented);
9021  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
9022     ones into save exprs so that they don't accidentally get evaluated
9023     more than once by the code below.  */
9024  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9025      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9026    incremented = save_expr (incremented);
9027
9028  /* Compute the operands as RTX.
9029     Note whether OP0 is the actual lvalue or a copy of it:
9030     I believe it is a copy iff it is a register or subreg
9031     and insns were generated in computing it.  */
9032
9033  temp = get_last_insn ();
9034  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9035
9036  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9037     in place but instead must do sign- or zero-extension during assignment,
9038     so we copy it into a new register and let the code below use it as
9039     a copy.
9040
9041     Note that we can safely modify this SUBREG since it is known not to be
9042     shared (it was made by the expand_expr call above).  */
9043
9044  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9045    {
9046      if (post)
9047	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9048      else
9049	bad_subreg = 1;
9050    }
9051  else if (GET_CODE (op0) == SUBREG
9052	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9053    {
9054      /* We cannot increment this SUBREG in place.  If we are
9055	 post-incrementing, get a copy of the old value.  Otherwise,
9056	 just mark that we cannot increment in place.  */
9057      if (post)
9058	op0 = copy_to_reg (op0);
9059      else
9060	bad_subreg = 1;
9061    }
9062
9063  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9064		 && temp != get_last_insn ());
9065  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9066
9067  /* Decide whether incrementing or decrementing.  */
9068  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9069      || TREE_CODE (exp) == PREDECREMENT_EXPR)
9070    this_optab = sub_optab;
9071
9072  /* Convert decrement by a constant into a negative increment.  */
9073  if (this_optab == sub_optab
9074      && GET_CODE (op1) == CONST_INT)
9075    {
9076      op1 = GEN_INT (-INTVAL (op1));
9077      this_optab = add_optab;
9078    }
9079
9080  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9081    this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9082
9083  /* For a preincrement, see if we can do this with a single instruction.  */
9084  if (!post)
9085    {
9086      icode = (int) this_optab->handlers[(int) mode].insn_code;
9087      if (icode != (int) CODE_FOR_nothing
9088	  /* Make sure that OP0 is valid for operands 0 and 1
9089	     of the insn we want to queue.  */
9090	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9091	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
9092	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
9093	single_insn = 1;
9094    }
9095
9096  /* If OP0 is not the actual lvalue, but rather a copy in a register,
9097     then we cannot just increment OP0.  We must therefore contrive to
9098     increment the original value.  Then, for postincrement, we can return
9099     OP0 since it is a copy of the old value.  For preincrement, expand here
9100     unless we can do it with a single insn.
9101
9102     Likewise if storing directly into OP0 would clobber high bits
9103     we need to preserve (bad_subreg).  */
9104  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9105    {
9106      /* This is the easiest way to increment the value wherever it is.
9107	 Problems with multiple evaluation of INCREMENTED are prevented
9108	 because either (1) it is a component_ref or preincrement,
9109	 in which case it was stabilized above, or (2) it is an array_ref
9110	 with constant index in an array in a register, which is
9111	 safe to reevaluate.  */
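      /* For example, for `++i' NEWEXP below is the tree `i + 1'; expanding
	 the assignment `i = i + 1' then performs the increment in place.  */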
9112      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9113			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
9114			    ? MINUS_EXPR : PLUS_EXPR),
9115			   TREE_TYPE (exp),
9116			   incremented,
9117			   TREE_OPERAND (exp, 1));
9118
9119      while (TREE_CODE (incremented) == NOP_EXPR
9120	     || TREE_CODE (incremented) == CONVERT_EXPR)
9121	{
9122	  newexp = convert (TREE_TYPE (incremented), newexp);
9123	  incremented = TREE_OPERAND (incremented, 0);
9124	}
9125
9126      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9127      return post ? op0 : temp;
9128    }
9129
9130  if (post)
9131    {
9132      /* We have a true reference to the value in OP0.
9133	 If there is an insn to add or subtract in this mode, queue it.
9134	 Queueing the increment insn avoids the register shuffling
9135	 that often results if we must increment now and first save
9136	 the old value for subsequent use.  */
9137
9138#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
9139      op0 = stabilize (op0);
9140#endif
9141
9142      icode = (int) this_optab->handlers[(int) mode].insn_code;
9143      if (icode != (int) CODE_FOR_nothing
9144	  /* Make sure that OP0 is valid for operands 0 and 1
9145	     of the insn we want to queue.  */
9146	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
9147	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
9148	{
9149	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9150	    op1 = force_reg (mode, op1);
9151
9152	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9153	}
9154      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9155	{
9156	  rtx addr = (general_operand (XEXP (op0, 0), mode)
9157		      ? force_reg (Pmode, XEXP (op0, 0))
9158		      : copy_to_reg (XEXP (op0, 0)));
9159	  rtx temp, result;
9160
9161	  op0 = replace_equiv_address (op0, addr);
9162	  temp = force_reg (GET_MODE (op0), op0);
9163	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9164	    op1 = force_reg (mode, op1);
9165
9166	  /* The increment queue is LIFO, thus we have to `queue'
9167	     the instructions in reverse order.  */
9168	  enqueue_insn (op0, gen_move_insn (op0, temp));
9169	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9170	  return result;
9171	}
9172    }
9173
9174  /* Preincrement, or we can't increment with one simple insn.  */
9175  if (post)
9176    /* Save a copy of the value before inc or dec, to return it later.  */
9177    temp = value = copy_to_reg (op0);
9178  else
9179    /* Arrange to return the incremented value.  */
9180    /* Copy the rtx because expand_binop will protect from the queue,
9181       and the results of that would be invalid for us to return
9182       if our caller does emit_queue before using our result.  */
9183    temp = copy_rtx (value = op0);
9184
9185  /* Increment however we can.  */
9186  op1 = expand_binop (mode, this_optab, value, op1, op0,
9187		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9188
9189  /* Make sure the value is stored into OP0.  */
9190  if (op1 != op0)
9191    emit_move_insn (op0, op1);
9192
9193  return temp;
9194}
9195
9196/* At the start of a function, record that we have no previously-pushed
9197   arguments waiting to be popped.  */
9198
9199void
9200init_pending_stack_adjust ()
9201{
9202  pending_stack_adjust = 0;
9203}
9204
9205/* When exiting from function, if safe, clear out any pending stack adjust
9206   so the adjustment won't get done.
9207
9208   Note, if the current function calls alloca, then it must have a
9209   frame pointer regardless of the value of flag_omit_frame_pointer.  */
9210
9211void
9212clear_pending_stack_adjust ()
9213{
9214#ifdef EXIT_IGNORE_STACK
9215  if (optimize > 0
9216      && (! flag_omit_frame_pointer || current_function_calls_alloca)
9217      && EXIT_IGNORE_STACK
9218      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9219      && ! flag_inline_functions)
9220    {
9221      stack_pointer_delta -= pending_stack_adjust;
9222      pending_stack_adjust = 0;
9223    }
9224#endif
9225}
9226
9227/* Pop any previously-pushed arguments that have not been popped yet.  */
9228
9229void
9230do_pending_stack_adjust ()
9231{
9232  if (inhibit_defer_pop == 0)
9233    {
9234      if (pending_stack_adjust != 0)
9235	adjust_stack (GEN_INT (pending_stack_adjust));
9236      pending_stack_adjust = 0;
9237    }
9238}
9239
9240/* Expand conditional expressions.  */
9241
9242/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9243   LABEL is an rtx of code CODE_LABEL, in this function and all the
9244   functions here.  */
9245
9246void
9247jumpifnot (exp, label)
9248     tree exp;
9249     rtx label;
9250{
9251  do_jump (exp, label, NULL_RTX);
9252}
9253
9254/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9255
9256void
9257jumpif (exp, label)
9258     tree exp;
9259     rtx label;
9260{
9261  do_jump (exp, NULL_RTX, label);
9262}
9263
9264/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9265   the result is zero, or IF_TRUE_LABEL if the result is one.
9266   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9267   meaning fall through in that case.
9268
9269   do_jump always does any pending stack adjust except when it does not
9270   actually perform a jump.  An example where there is no jump
9271   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9272
9273   This function is responsible for optimizing cases such as
9274   &&, || and comparison operators in EXP.  */
9275
9276void
9277do_jump (exp, if_false_label, if_true_label)
9278     tree exp;
9279     rtx if_false_label, if_true_label;
9280{
9281  enum tree_code code = TREE_CODE (exp);
9282  /* Some cases need to create a label to jump to
9283     in order to properly fall through.
9284     These cases set DROP_THROUGH_LABEL nonzero.  */
9285  rtx drop_through_label = 0;
9286  rtx temp;
9287  int i;
9288  tree type;
9289  enum machine_mode mode;
9290
9291#ifdef MAX_INTEGER_COMPUTATION_MODE
9292  check_max_integer_computation_mode (exp);
9293#endif
9294
9295  emit_queue ();
9296
9297  switch (code)
9298    {
9299    case ERROR_MARK:
9300      break;
9301
9302    case INTEGER_CST:
9303      temp = integer_zerop (exp) ? if_false_label : if_true_label;
9304      if (temp)
9305	emit_jump (temp);
9306      break;
9307
9308#if 0
9309      /* This is not true with #pragma weak  */
9310    case ADDR_EXPR:
9311      /* The address of something can never be zero.  */
9312      if (if_true_label)
9313	emit_jump (if_true_label);
9314      break;
9315#endif
9316
9317    case NOP_EXPR:
9318      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9319	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9320	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9321	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9322	goto normal;
9323    case CONVERT_EXPR:
9324      /* If we are narrowing the operand, we have to do the compare in the
9325	 narrower mode.  */
9326      if ((TYPE_PRECISION (TREE_TYPE (exp))
9327	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9328	goto normal;
9329    case NON_LVALUE_EXPR:
9330    case REFERENCE_EXPR:
9331    case ABS_EXPR:
9332    case NEGATE_EXPR:
9333    case LROTATE_EXPR:
9334    case RROTATE_EXPR:
9335      /* These cannot change zero->non-zero or vice versa.  */
9336      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9337      break;
9338
9339    case WITH_RECORD_EXPR:
9340      /* Put the object on the placeholder list, recurse through our first
9341	 operand, and pop the list.  */
9342      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9343				    placeholder_list);
9344      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9345      placeholder_list = TREE_CHAIN (placeholder_list);
9346      break;
9347
9348#if 0
9349      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9350	 a test and can be longer if the test is eliminated.  */
9351    case PLUS_EXPR:
9352      /* Reduce to minus.  */
9353      exp = build (MINUS_EXPR, TREE_TYPE (exp),
9354		   TREE_OPERAND (exp, 0),
9355		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9356				 TREE_OPERAND (exp, 1))));
9357      /* Process as MINUS.  */
9358#endif
9359
9360    case MINUS_EXPR:
9361      /* Non-zero iff operands of minus differ.  */
9362      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9363				  TREE_OPERAND (exp, 0),
9364				  TREE_OPERAND (exp, 1)),
9365			   NE, NE, if_false_label, if_true_label);
9366      break;
9367
9368    case BIT_AND_EXPR:
9369      /* If we are AND'ing with a small constant, do this comparison in the
9370	 smallest type that fits.  If the machine doesn't have comparisons
9371	 that small, it will be converted back to the wider comparison.
9372	 This helps if we are testing the sign bit of a narrower object.
9373	 combine can't do this for us because it can't know whether a
9374	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
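      /* For example, `if (x & 0x80)' with X an int can be done as an 8-bit
	 (QImode) comparison on machines that have one, since only the low
	 byte of the AND result can be nonzero.  */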
9375
9376      if (! SLOW_BYTE_ACCESS
9377	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9378	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9379	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9380	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9381	  && (type = type_for_mode (mode, 1)) != 0
9382	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9383	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9384	      != CODE_FOR_nothing))
9385	{
9386	  do_jump (convert (type, exp), if_false_label, if_true_label);
9387	  break;
9388	}
9389      goto normal;
9390
9391    case TRUTH_NOT_EXPR:
9392      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9393      break;
9394
9395    case TRUTH_ANDIF_EXPR:
9396      if (if_false_label == 0)
9397	if_false_label = drop_through_label = gen_label_rtx ();
9398      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9399      start_cleanup_deferral ();
9400      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9401      end_cleanup_deferral ();
9402      break;
9403
9404    case TRUTH_ORIF_EXPR:
9405      if (if_true_label == 0)
9406	if_true_label = drop_through_label = gen_label_rtx ();
9407      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9408      start_cleanup_deferral ();
9409      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9410      end_cleanup_deferral ();
9411      break;
9412
9413    case COMPOUND_EXPR:
9414      push_temp_slots ();
9415      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9416      preserve_temp_slots (NULL_RTX);
9417      free_temp_slots ();
9418      pop_temp_slots ();
9419      emit_queue ();
9420      do_pending_stack_adjust ();
9421      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9422      break;
9423
9424    case COMPONENT_REF:
9425    case BIT_FIELD_REF:
9426    case ARRAY_REF:
9427    case ARRAY_RANGE_REF:
9428      {
9429	HOST_WIDE_INT bitsize, bitpos;
9430	int unsignedp;
9431	enum machine_mode mode;
9432	tree type;
9433	tree offset;
9434	int volatilep = 0;
9435
9436	/* Get description of this reference.  We don't actually care
9437	   about the underlying object here.  */
9438	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9439			     &unsignedp, &volatilep);
9440
9441	type = type_for_size (bitsize, unsignedp);
9442	if (! SLOW_BYTE_ACCESS
9443	    && type != 0 && bitsize >= 0
9444	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9445	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9446		!= CODE_FOR_nothing))
9447	  {
9448	    do_jump (convert (type, exp), if_false_label, if_true_label);
9449	    break;
9450	  }
9451	goto normal;
9452      }
9453
9454    case COND_EXPR:
9455      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9456      if (integer_onep (TREE_OPERAND (exp, 1))
9457	  && integer_zerop (TREE_OPERAND (exp, 2)))
9458	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9459
9460      else if (integer_zerop (TREE_OPERAND (exp, 1))
9461	       && integer_onep (TREE_OPERAND (exp, 2)))
9462	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9463
9464      else
9465	{
9466	  rtx label1 = gen_label_rtx ();
9467	  drop_through_label = gen_label_rtx ();
9468
9469	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9470
9471	  start_cleanup_deferral ();
9472	  /* Now the THEN-expression.  */
9473	  do_jump (TREE_OPERAND (exp, 1),
9474		   if_false_label ? if_false_label : drop_through_label,
9475		   if_true_label ? if_true_label : drop_through_label);
9476	  /* In case the do_jump just above never jumps.  */
9477	  do_pending_stack_adjust ();
9478	  emit_label (label1);
9479
9480	  /* Now the ELSE-expression.  */
9481	  do_jump (TREE_OPERAND (exp, 2),
9482		   if_false_label ? if_false_label : drop_through_label,
9483		   if_true_label ? if_true_label : drop_through_label);
9484	  end_cleanup_deferral ();
9485	}
9486      break;
9487
9488    case EQ_EXPR:
9489      {
9490	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9491
9492	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9493	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9494	  {
9495	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9496	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9497	    do_jump
9498	      (fold
9499	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9500		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9501				    fold (build1 (REALPART_EXPR,
9502						  TREE_TYPE (inner_type),
9503						  exp0)),
9504				    fold (build1 (REALPART_EXPR,
9505						  TREE_TYPE (inner_type),
9506						  exp1)))),
9507		       fold (build (EQ_EXPR, TREE_TYPE (exp),
9508				    fold (build1 (IMAGPART_EXPR,
9509						  TREE_TYPE (inner_type),
9510						  exp0)),
9511				    fold (build1 (IMAGPART_EXPR,
9512						  TREE_TYPE (inner_type),
9513						  exp1)))))),
9514	       if_false_label, if_true_label);
9515	  }
9516
9517	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9518	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9519
9520	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9521		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9522	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9523	else
9524	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9525	break;
9526      }
9527
9528    case NE_EXPR:
9529      {
9530	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9531
9532	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9533	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9534	  {
9535	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9536	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9537	    do_jump
9538	      (fold
9539	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9540		       fold (build (NE_EXPR, TREE_TYPE (exp),
9541				    fold (build1 (REALPART_EXPR,
9542						  TREE_TYPE (inner_type),
9543						  exp0)),
9544				    fold (build1 (REALPART_EXPR,
9545						  TREE_TYPE (inner_type),
9546						  exp1)))),
9547		       fold (build (NE_EXPR, TREE_TYPE (exp),
9548				    fold (build1 (IMAGPART_EXPR,
9549						  TREE_TYPE (inner_type),
9550						  exp0)),
9551				    fold (build1 (IMAGPART_EXPR,
9552						  TREE_TYPE (inner_type),
9553						  exp1)))))),
9554	       if_false_label, if_true_label);
9555	  }
9556
9557	else if (integer_zerop (TREE_OPERAND (exp, 1)))
9558	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9559
9560	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9561		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9562	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9563	else
9564	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9565	break;
9566      }
9567
9568    case LT_EXPR:
9569      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9570      if (GET_MODE_CLASS (mode) == MODE_INT
9571	  && ! can_compare_p (LT, mode, ccp_jump))
9572	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9573      else
9574	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9575      break;
9576
9577    case LE_EXPR:
9578      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9579      if (GET_MODE_CLASS (mode) == MODE_INT
9580	  && ! can_compare_p (LE, mode, ccp_jump))
9581	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9582      else
9583	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9584      break;
9585
9586    case GT_EXPR:
9587      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9588      if (GET_MODE_CLASS (mode) == MODE_INT
9589	  && ! can_compare_p (GT, mode, ccp_jump))
9590	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9591      else
9592	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9593      break;
9594
9595    case GE_EXPR:
9596      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9597      if (GET_MODE_CLASS (mode) == MODE_INT
9598	  && ! can_compare_p (GE, mode, ccp_jump))
9599	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9600      else
9601	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9602      break;
9603
9604    case UNORDERED_EXPR:
9605    case ORDERED_EXPR:
9606      {
9607	enum rtx_code cmp, rcmp;
9608	int do_rev;
9609
9610	if (code == UNORDERED_EXPR)
9611	  cmp = UNORDERED, rcmp = ORDERED;
9612	else
9613	  cmp = ORDERED, rcmp = UNORDERED;
9614	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9615
9616	do_rev = 0;
9617	if (! can_compare_p (cmp, mode, ccp_jump)
9618	    && (can_compare_p (rcmp, mode, ccp_jump)
9619		/* If the target doesn't provide either UNORDERED or ORDERED
9620		   comparisons, canonicalize on UNORDERED for the library.  */
9621		|| rcmp == UNORDERED))
9622	  do_rev = 1;
9623
9624        if (! do_rev)
9625	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9626	else
9627	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9628      }
9629      break;
9630
9631    {
9632      enum rtx_code rcode1;
9633      enum tree_code tcode2;
9634
9635      case UNLT_EXPR:
9636	rcode1 = UNLT;
9637	tcode2 = LT_EXPR;
9638	goto unordered_bcc;
9639      case UNLE_EXPR:
9640	rcode1 = UNLE;
9641	tcode2 = LE_EXPR;
9642	goto unordered_bcc;
9643      case UNGT_EXPR:
9644	rcode1 = UNGT;
9645	tcode2 = GT_EXPR;
9646	goto unordered_bcc;
9647      case UNGE_EXPR:
9648	rcode1 = UNGE;
9649	tcode2 = GE_EXPR;
9650	goto unordered_bcc;
9651      case UNEQ_EXPR:
9652	rcode1 = UNEQ;
9653	tcode2 = EQ_EXPR;
9654	goto unordered_bcc;
9655
9656      unordered_bcc:
9657        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9658	if (can_compare_p (rcode1, mode, ccp_jump))
9659	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9660			       if_true_label);
9661	else
9662	  {
9663	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
9664	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
9665	    tree cmp0, cmp1;
9666
9667	    /* If the target doesn't support combined unordered
9668	       compares, decompose into UNORDERED + comparison.  */
9669	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9670	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9671	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9672	    do_jump (exp, if_false_label, if_true_label);
9673	  }
9674      }
9675      break;
9676
9677      /* Special case:
9678		__builtin_expect (<test>, 0)	and
9679		__builtin_expect (<test>, 1)
9680
9681	 We need to do this here, so that <test> is not converted to an SCC
9682	 operation on machines that use condition code registers and COMPARE
9683	 like the PowerPC, and then the jump is done based on whether the SCC
9684	 operation produced a 1 or 0.  */
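      /* For instance (an illustrative sketch only; P and ERROR_PATH
	 are hypothetical), a use such as

		if (__builtin_expect (p == 0, 0))
		  error_path ();

	 should expand directly into a compare-and-jump here rather
	 than first materializing the test as a 0/1 value.  */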
9685    case CALL_EXPR:
9686      /* Check for a built-in function.  */
9687      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9688	{
9689	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9690	  tree arglist = TREE_OPERAND (exp, 1);
9691
9692	  if (TREE_CODE (fndecl) == FUNCTION_DECL
9693	      && DECL_BUILT_IN (fndecl)
9694	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9695	      && arglist != NULL_TREE
9696	      && TREE_CHAIN (arglist) != NULL_TREE)
9697	    {
9698	      rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9699						    if_true_label);
9700
9701	      if (seq != NULL_RTX)
9702		{
9703		  emit_insn (seq);
9704		  return;
9705		}
9706	    }
9707	}
9708      /* fall through and generate the normal code.  */
9709      /* Fall through and generate the normal code.  */
9710    default:
9711    normal:
9712      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9713#if 0
9714      /* This is not needed any more and causes poor code since it causes
9715	 comparisons and tests from non-SI objects to have different code
9716	 sequences.  */
9717      /* Copy to register to avoid generating bad insns by cse
9718	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9719      if (!cse_not_expected && GET_CODE (temp) == MEM)
9720	temp = copy_to_reg (temp);
9721#endif
9722      do_pending_stack_adjust ();
9723      /* Do any postincrements in the expression that was tested.  */
9724      emit_queue ();
9725
9726      if (GET_CODE (temp) == CONST_INT
9727	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9728	  || GET_CODE (temp) == LABEL_REF)
9729	{
9730	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9731	  if (target)
9732	    emit_jump (target);
9733	}
9734      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9735	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9736	/* Note that swapping the labels gives us not-equal.  */
9737	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9738      else if (GET_MODE (temp) != VOIDmode)
9739	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9740				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9741				 GET_MODE (temp), NULL_RTX,
9742				 if_false_label, if_true_label);
9743      else
9744	abort ();
9745    }
9746
9747  if (drop_through_label)
9748    {
9749      /* If do_jump produces code that might be jumped around,
9750	 do any stack adjusts from that code, before the place
9751	 where control merges in.  */
9752      do_pending_stack_adjust ();
9753      emit_label (drop_through_label);
9754    }
9755}
9756
9757/* Given a comparison expression EXP for values too wide to be compared
9758   with one insn, test the comparison and jump to the appropriate label.
9759   The code of EXP is ignored; we always test GT if SWAP is 0,
9760   and LT if SWAP is 1.  */
9761
9762static void
9763do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9764     tree exp;
9765     int swap;
9766     rtx if_false_label, if_true_label;
9767{
9768  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9769  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9770  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9771  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9772
9773  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9774}
9775
9776/* Compare OP0 with OP1, word at a time, in mode MODE.
9777   UNSIGNEDP says to do unsigned comparison.
9778   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
9779
9780void
9781do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9782     enum machine_mode mode;
9783     int unsignedp;
9784     rtx op0, op1;
9785     rtx if_false_label, if_true_label;
9786{
9787  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9788  rtx drop_through_label = 0;
9789  int i;
9790
9791  if (! if_true_label || ! if_false_label)
9792    drop_through_label = gen_label_rtx ();
9793  if (! if_true_label)
9794    if_true_label = drop_through_label;
9795  if (! if_false_label)
9796    if_false_label = drop_through_label;
9797
9798  /* Compare a word at a time, high order first.  */
9799  for (i = 0; i < nwords; i++)
9800    {
9801      rtx op0_word, op1_word;
9802
9803      if (WORDS_BIG_ENDIAN)
9804	{
9805	  op0_word = operand_subword_force (op0, i, mode);
9806	  op1_word = operand_subword_force (op1, i, mode);
9807	}
9808      else
9809	{
9810	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9811	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9812	}
9813
9814	      /* All but the high-order word must be compared as unsigned.  */
9815      do_compare_rtx_and_jump (op0_word, op1_word, GT,
9816			       (unsignedp || i > 0), word_mode, NULL_RTX,
9817			       NULL_RTX, if_true_label);
9818
9819      /* Consider lower words only if these are equal.  */
9820      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9821			       NULL_RTX, NULL_RTX, if_false_label);
9822    }
9823
9824  if (if_false_label)
9825    emit_jump (if_false_label);
9826  if (drop_through_label)
9827    emit_label (drop_through_label);
9828}
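
/* A minimal sketch, illustrative only and never compiled: the decision
   sequence the loop above emits for a two-word unsigned greater-than
   test (the word size is assumed here to fit in "unsigned long").  The
   high-order words decide whenever they differ; the low-order words
   matter only when the high-order words are equal.  */
#if 0
static int
example_two_word_gtu (op0_hi, op0_lo, op1_hi, op1_lo)
     unsigned long op0_hi, op0_lo, op1_hi, op1_lo;
{
  if (op0_hi > op1_hi)		/* branch to if_true_label */
    return 1;
  if (op0_hi != op1_hi)		/* branch to if_false_label */
    return 0;
  return op0_lo > op1_lo;	/* lowest word decides */
}
#endif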
9829
9830/* Given an EQ_EXPR expression EXP for values too wide to be compared
9831   with one insn, test the comparison and jump to the appropriate label.  */
9832
9833static void
9834do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9835     tree exp;
9836     rtx if_false_label, if_true_label;
9837{
9838  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9839  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9840  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9841  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9842  int i;
9843  rtx drop_through_label = 0;
9844
9845  if (! if_false_label)
9846    drop_through_label = if_false_label = gen_label_rtx ();
9847
9848  for (i = 0; i < nwords; i++)
9849    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9850			     operand_subword_force (op1, i, mode),
9851			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9852			     word_mode, NULL_RTX, if_false_label, NULL_RTX);
9853
9854  if (if_true_label)
9855    emit_jump (if_true_label);
9856  if (drop_through_label)
9857    emit_label (drop_through_label);
9858}
9859
9860/* Jump according to whether OP0 is 0.
9861   We assume that OP0 has an integer mode that is too wide
9862   for the available compare insns.  */
9863
9864void
9865do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9866     rtx op0;
9867     rtx if_false_label, if_true_label;
9868{
9869  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9870  rtx part;
9871  int i;
9872  rtx drop_through_label = 0;
9873
9874  /* The fastest way of doing this comparison on almost any machine is to
9875     "or" all the words and compare the result.  If all have to be loaded
9876     from memory and this is a very wide item, it's possible this may
9877     be slower, but that's highly unlikely.  */
9878
9879  part = gen_reg_rtx (word_mode);
9880  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9881  for (i = 1; i < nwords && part != 0; i++)
9882    part = expand_binop (word_mode, ior_optab, part,
9883			 operand_subword_force (op0, i, GET_MODE (op0)),
9884			 part, 1, OPTAB_WIDEN);
9885
9886  if (part != 0)
9887    {
9888      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9889			       NULL_RTX, if_false_label, if_true_label);
9890
9891      return;
9892    }
9893
9894  /* If we couldn't do the "or" simply, do this with a series of compares.  */
9895  if (! if_false_label)
9896    drop_through_label = if_false_label = gen_label_rtx ();
9897
9898  for (i = 0; i < nwords; i++)
9899    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9900			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
9901			     if_false_label, NULL_RTX);
9902
9903  if (if_true_label)
9904    emit_jump (if_true_label);
9905
9906  if (drop_through_label)
9907    emit_label (drop_through_label);
9908}
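
/* Illustrative only, never compiled: the effect of the "or all the
   words" strategy above for a two-word value (the word size is assumed
   here to fit in "unsigned long").  */
#if 0
static int
example_two_word_is_zero (lo, hi)
     unsigned long lo, hi;
{
  return (lo | hi) == 0;	/* one IOR, then a single compare with 0 */
}
#endif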
9909
9910/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9911   (including code to compute the values to be compared),
9912   and set (CC0) according to the result.
9913   The decision as to signed or unsigned comparison must be made by the caller.
9914
9915   We force a stack adjustment unless there are currently
9916   things pushed on the stack that aren't yet used.
9917
9918   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9919   compared.  */
9920
9921rtx
9922compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9923     rtx op0, op1;
9924     enum rtx_code code;
9925     int unsignedp;
9926     enum machine_mode mode;
9927     rtx size;
9928{
9929  rtx tem;
9930
9931  /* If one operand is constant, make it the second one.  Only do this
9932     if the other operand is not constant as well.  */
9933
9934  if (swap_commutative_operands_p (op0, op1))
9935    {
9936      tem = op0;
9937      op0 = op1;
9938      op1 = tem;
9939      code = swap_condition (code);
9940    }
9941
9942  if (flag_force_mem)
9943    {
9944      op0 = force_not_mem (op0);
9945      op1 = force_not_mem (op1);
9946    }
9947
9948  do_pending_stack_adjust ();
9949
9950  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9951      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9952    return tem;
9953
9954#if 0
9955  /* There's no need to do this now that combine.c can eliminate lots of
9956     sign extensions.  This can be less efficient in certain cases on other
9957     machines.  */
9958
9959  /* If this is a signed equality comparison, we can do it as an
9960     unsigned comparison since zero-extension is cheaper than sign
9961     extension and comparisons with zero are done as unsigned.  This is
9962     the case even on machines that can do fast sign extension, since
9963     zero-extension is easier to combine with other operations than
9964     sign-extension is.  If we are comparing against a constant, we must
9965     convert it to what it would look like unsigned.  */
9966  if ((code == EQ || code == NE) && ! unsignedp
9967      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9968    {
9969      if (GET_CODE (op1) == CONST_INT
9970	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9971	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9972      unsignedp = 1;
9973    }
9974#endif
9975
9976  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9977
9978  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9979}
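
/* Illustrative only, never compiled: the operand canonicalization
   performed above.  A comparison written with the constant first,
   such as "10 < x", is handled as "x > 10": the operands are swapped
   and swap_condition adjusts the comparison code.  */
#if 0
static int
example_canonical_compare (x)
     int x;
{
  return x > 10;	/* canonical form of "10 < x" */
}
#endif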
9980
9981/* Like do_compare_and_jump but expects the values to compare as two rtx's.
9982   The decision as to signed or unsigned comparison must be made by the caller.
9983
9984   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9985   compared.  */
9986
9987void
9988do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9989			 if_false_label, if_true_label)
9990     rtx op0, op1;
9991     enum rtx_code code;
9992     int unsignedp;
9993     enum machine_mode mode;
9994     rtx size;
9995     rtx if_false_label, if_true_label;
9996{
9997  rtx tem;
9998  int dummy_true_label = 0;
9999
10000  /* Reverse the comparison if that is safe and we want to jump if it is
10001     false.  */
10002  if (! if_true_label && ! FLOAT_MODE_P (mode))
10003    {
10004      if_true_label = if_false_label;
10005      if_false_label = 0;
10006      code = reverse_condition (code);
10007    }
10008
10009  /* If one operand is constant, make it the second one.  Only do this
10010     if the other operand is not constant as well.  */
10011
10012  if (swap_commutative_operands_p (op0, op1))
10013    {
10014      tem = op0;
10015      op0 = op1;
10016      op1 = tem;
10017      code = swap_condition (code);
10018    }
10019
10020  if (flag_force_mem)
10021    {
10022      op0 = force_not_mem (op0);
10023      op1 = force_not_mem (op1);
10024    }
10025
10026  do_pending_stack_adjust ();
10027
10028  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10029      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10030    {
10031      if (tem == const_true_rtx)
10032	{
10033	  if (if_true_label)
10034	    emit_jump (if_true_label);
10035	}
10036      else
10037	{
10038	  if (if_false_label)
10039	    emit_jump (if_false_label);
10040	}
10041      return;
10042    }
10043
10044#if 0
10045  /* There's no need to do this now that combine.c can eliminate lots of
10046     sign extensions.  This can be less efficient in certain cases on other
10047     machines.  */
10048
10049  /* If this is a signed equality comparison, we can do it as an
10050     unsigned comparison since zero-extension is cheaper than sign
10051     extension and comparisons with zero are done as unsigned.  This is
10052     the case even on machines that can do fast sign extension, since
10053     zero-extension is easier to combine with other operations than
10054     sign-extension is.  If we are comparing against a constant, we must
10055     convert it to what it would look like unsigned.  */
10056  if ((code == EQ || code == NE) && ! unsignedp
10057      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10058    {
10059      if (GET_CODE (op1) == CONST_INT
10060	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10061	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10062      unsignedp = 1;
10063    }
10064#endif
10065
10066  if (! if_true_label)
10067    {
10068      dummy_true_label = 1;
10069      if_true_label = gen_label_rtx ();
10070    }
10071
10072  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10073			   if_true_label);
10074
10075  if (if_false_label)
10076    emit_jump (if_false_label);
10077  if (dummy_true_label)
10078    emit_label (if_true_label);
10079}
10080
10081/* Generate code for a comparison expression EXP (including code to compute
10082   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10083   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
10084   generated code will drop through.
10085   SIGNED_CODE should be the rtx operation for this comparison for
10086   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10087
10088   We force a stack adjustment unless there are currently
10089   things pushed on the stack that aren't yet used.  */
10090
10091static void
10092do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10093		     if_true_label)
10094     tree exp;
10095     enum rtx_code signed_code, unsigned_code;
10096     rtx if_false_label, if_true_label;
10097{
10098  rtx op0, op1;
10099  tree type;
10100  enum machine_mode mode;
10101  int unsignedp;
10102  enum rtx_code code;
10103
10104  /* Don't crash if the comparison was erroneous.  */
10105  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10106  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10107    return;
10108
10109  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10110  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10111    return;
10112
10113  type = TREE_TYPE (TREE_OPERAND (exp, 0));
10114  mode = TYPE_MODE (type);
10115  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10116      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10117	  || (GET_MODE_BITSIZE (mode)
10118	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10119								      1)))))))
10120    {
10121      /* op0 might have been replaced by a promoted constant, in which
10122	 case the type of the second argument should be used.  */
10123      type = TREE_TYPE (TREE_OPERAND (exp, 1));
10124      mode = TYPE_MODE (type);
10125    }
10126  unsignedp = TREE_UNSIGNED (type);
10127  code = unsignedp ? unsigned_code : signed_code;
10128
10129#ifdef HAVE_canonicalize_funcptr_for_compare
10130  /* If function pointers need to be "canonicalized" before they can
10131     be reliably compared, then canonicalize them.  */
10132  if (HAVE_canonicalize_funcptr_for_compare
10133      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10134      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10135	  == FUNCTION_TYPE))
10136    {
10137      rtx new_op0 = gen_reg_rtx (mode);
10138
10139      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10140      op0 = new_op0;
10141    }
10142
10143  if (HAVE_canonicalize_funcptr_for_compare
10144      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10145      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10146	  == FUNCTION_TYPE))
10147    {
10148      rtx new_op1 = gen_reg_rtx (mode);
10149
10150      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10151      op1 = new_op1;
10152    }
10153#endif
10154
10155  /* Do any postincrements in the expression that was tested.  */
10156  emit_queue ();
10157
10158  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10159			   ((mode == BLKmode)
10160			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10161			   if_false_label, if_true_label);
10162}
10163
10164/* Generate code to calculate EXP using a store-flag instruction
10165   and return an rtx for the result.  EXP is either a comparison
10166   or a TRUTH_NOT_EXPR whose operand is a comparison.
10167
10168   If TARGET is nonzero, store the result there if convenient.
10169
10170   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10171   cheap.
10172
10173   Return zero if there is no suitable set-flag instruction
10174   available on this machine.
10175
10176   Once expand_expr has been called on the arguments of the comparison,
10177   we are committed to doing the store flag, since it is not safe to
10178   re-evaluate the expression.  We emit the store-flag insn by calling
10179   emit_store_flag, but only expand the arguments if we have a reason
10180   to believe that emit_store_flag will be successful.  If we think that
10181   it will, but it isn't, we have to simulate the store-flag with a
10182   set/jump/set sequence.  */
10183
10184static rtx
10185do_store_flag (exp, target, mode, only_cheap)
10186     tree exp;
10187     rtx target;
10188     enum machine_mode mode;
10189     int only_cheap;
10190{
10191  enum rtx_code code;
10192  tree arg0, arg1, type;
10193  tree tem;
10194  enum machine_mode operand_mode;
10195  int invert = 0;
10196  int unsignedp;
10197  rtx op0, op1;
10198  enum insn_code icode;
10199  rtx subtarget = target;
10200  rtx result, label;
10201
10202  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10203     result at the end.  We can't simply invert the test since it would
10204     have already been inverted if it were valid.  This case occurs for
10205     some floating-point comparisons.  */
10206
10207  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10208    invert = 1, exp = TREE_OPERAND (exp, 0);
10209
10210  arg0 = TREE_OPERAND (exp, 0);
10211  arg1 = TREE_OPERAND (exp, 1);
10212
10213  /* Don't crash if the comparison was erroneous.  */
10214  if (arg0 == error_mark_node || arg1 == error_mark_node)
10215    return const0_rtx;
10216
10217  type = TREE_TYPE (arg0);
10218  operand_mode = TYPE_MODE (type);
10219  unsignedp = TREE_UNSIGNED (type);
10220
10221  /* We won't bother with BLKmode store-flag operations because it would mean
10222     passing a lot of information to emit_store_flag.  */
10223  if (operand_mode == BLKmode)
10224    return 0;
10225
10226  /* We won't bother with store-flag operations involving function pointers
10227     when function pointers must be canonicalized before comparisons.  */
10228#ifdef HAVE_canonicalize_funcptr_for_compare
10229  if (HAVE_canonicalize_funcptr_for_compare
10230      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10231	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10232	       == FUNCTION_TYPE))
10233	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10234	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10235		  == FUNCTION_TYPE))))
10236    return 0;
10237#endif
10238
10239  STRIP_NOPS (arg0);
10240  STRIP_NOPS (arg1);
10241
10242  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10243     operation of some type.  Some comparisons against 1 and -1 can be
10244     converted to comparisons with zero.  Do so here so that the tests
10245     below will be aware that we have a comparison with zero.  These
10246     tests will not catch constants in the first operand, but constants
10247     are rarely passed as the first operand.  */
10248
10249  switch (TREE_CODE (exp))
10250    {
10251    case EQ_EXPR:
10252      code = EQ;
10253      break;
10254    case NE_EXPR:
10255      code = NE;
10256      break;
10257    case LT_EXPR:
10258      if (integer_onep (arg1))
10259	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10260      else
10261	code = unsignedp ? LTU : LT;
10262      break;
10263    case LE_EXPR:
10264      if (! unsignedp && integer_all_onesp (arg1))
10265	arg1 = integer_zero_node, code = LT;
10266      else
10267	code = unsignedp ? LEU : LE;
10268      break;
10269    case GT_EXPR:
10270      if (! unsignedp && integer_all_onesp (arg1))
10271	arg1 = integer_zero_node, code = GE;
10272      else
10273	code = unsignedp ? GTU : GT;
10274      break;
10275    case GE_EXPR:
10276      if (integer_onep (arg1))
10277	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10278      else
10279	code = unsignedp ? GEU : GE;
10280      break;
10281
10282    case UNORDERED_EXPR:
10283      code = UNORDERED;
10284      break;
10285    case ORDERED_EXPR:
10286      code = ORDERED;
10287      break;
10288    case UNLT_EXPR:
10289      code = UNLT;
10290      break;
10291    case UNLE_EXPR:
10292      code = UNLE;
10293      break;
10294    case UNGT_EXPR:
10295      code = UNGT;
10296      break;
10297    case UNGE_EXPR:
10298      code = UNGE;
10299      break;
10300    case UNEQ_EXPR:
10301      code = UNEQ;
10302      break;
10303
10304    default:
10305      abort ();
10306    }
10307
10308  /* Put a constant second.  */
10309  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10310    {
10311      tem = arg0; arg0 = arg1; arg1 = tem;
10312      code = swap_condition (code);
10313    }
10314
10315  /* If this is an equality or inequality test of a single bit, we can
10316     do this by shifting the bit being tested to the low-order bit and
10317     masking the result with the constant 1.  If the condition was EQ,
10318     we xor it with 1.  This does not require an scc insn and is faster
10319     than an scc insn even if we have it.  */
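  /* Worked example (illustrative only): for "(x & 8) != 0" the tested
     bit is bit 3, so the value is computed as "(x >> 3) & 1"; for the
     EQ form "(x & 8) == 0" an XOR with 1 is appended, giving
     "((x >> 3) & 1) ^ 1".  */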
10320
10321  if ((code == NE || code == EQ)
10322      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10323      && integer_pow2p (TREE_OPERAND (arg0, 1)))
10324    {
10325      tree inner = TREE_OPERAND (arg0, 0);
10326      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10327      int ops_unsignedp;
10328
10329      /* If INNER is a right shift by a constant and the shift count plus
10330	 BITNUM does not overflow, adjust BITNUM and INNER.  */
10331
10332      if (TREE_CODE (inner) == RSHIFT_EXPR
10333	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10334	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10335	  && bitnum < TYPE_PRECISION (type)
10336	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10337				   bitnum - TYPE_PRECISION (type)))
10338	{
10339	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10340	  inner = TREE_OPERAND (inner, 0);
10341	}
10342
10343      /* If we are going to be able to omit the AND below, we must do our
10344	 operations as unsigned.  If we must use the AND, we have a choice.
10345	 Normally unsigned is faster, but for some machines signed is.  */
10346      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10347#ifdef LOAD_EXTEND_OP
10348		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10349#else
10350		       : 1
10351#endif
10352		       );
10353
10354      if (! get_subtarget (subtarget)
10355	  || GET_MODE (subtarget) != operand_mode
10356	  || ! safe_from_p (subtarget, inner, 1))
10357	subtarget = 0;
10358
10359      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10360
10361      if (bitnum != 0)
10362	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10363			    size_int (bitnum), subtarget, ops_unsignedp);
10364
10365      if (GET_MODE (op0) != mode)
10366	op0 = convert_to_mode (mode, op0, ops_unsignedp);
10367
10368      if ((code == EQ && ! invert) || (code == NE && invert))
10369	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10370			    ops_unsignedp, OPTAB_LIB_WIDEN);
10371
10372      /* Put the AND last so it can combine with more things.  */
10373      if (bitnum != TYPE_PRECISION (type) - 1)
10374	op0 = expand_and (mode, op0, const1_rtx, subtarget);
10375
10376      return op0;
10377    }
10378
10379  /* Now see if we are likely to be able to do this.  Return if not.  */
10380  if (! can_compare_p (code, operand_mode, ccp_store_flag))
10381    return 0;
10382
10383  icode = setcc_gen_code[(int) code];
10384  if (icode == CODE_FOR_nothing
10385      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10386    {
10387      /* We can only do this if it is one of the special cases that
10388	 can be handled without an scc insn.  */
10389      if ((code == LT && integer_zerop (arg1))
10390	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10391	;
10392      else if (BRANCH_COST >= 0
10393	       && ! only_cheap && (code == NE || code == EQ)
10394	       && TREE_CODE (type) != REAL_TYPE
10395	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10396		    != CODE_FOR_nothing)
10397		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10398		       != CODE_FOR_nothing)))
10399	;
10400      else
10401	return 0;
10402    }
10403
10404  if (! get_subtarget (target)
10405      || GET_MODE (subtarget) != operand_mode
10406      || ! safe_from_p (subtarget, arg1, 1))
10407    subtarget = 0;
10408
10409  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10410  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10411
10412  if (target == 0)
10413    target = gen_reg_rtx (mode);
10414
10415  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10416     because, if emit_store_flag does anything, it will succeed and
10417     OP0 and OP1 will not be used subsequently.  */
10418
10419  result = emit_store_flag (target, code,
10420			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10421			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10422			    operand_mode, unsignedp, 1);
10423
10424  if (result)
10425    {
10426      if (invert)
10427	result = expand_binop (mode, xor_optab, result, const1_rtx,
10428			       result, 0, OPTAB_LIB_WIDEN);
10429      return result;
10430    }
10431
10432  /* If this failed, we have to do this with set/compare/jump/set code.  */
10433  if (GET_CODE (target) != REG
10434      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10435    target = gen_reg_rtx (GET_MODE (target));
10436
10437  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10438  result = compare_from_rtx (op0, op1, code, unsignedp,
10439			     operand_mode, NULL_RTX);
10440  if (GET_CODE (result) == CONST_INT)
10441    return (((result == const0_rtx && ! invert)
10442	     || (result != const0_rtx && invert))
10443	    ? const0_rtx : const1_rtx);
10444
10445  /* The code of RESULT may not match CODE if compare_from_rtx
10446     decided to swap its operands and reverse the original code.
10447
10448     We know that compare_from_rtx returns either a CONST_INT or
10449     a new comparison code, so it is safe to just extract the
10450     code from RESULT.  */
10451  code = GET_CODE (result);
10452
10453  label = gen_label_rtx ();
10454  if (bcc_gen_fctn[(int) code] == 0)
10455    abort ();
10456
10457  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10458  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10459  emit_label (label);
10460
10461  return target;
10462}
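
/* An illustrative sketch, never compiled, of the two shapes of code
   do_store_flag above arranges for something like "r = (a < b)":
   either a single store-flag (scc) instruction, or, when
   emit_store_flag cannot handle the comparison, the set/jump/set
   sequence emitted at the end of the function.  The names below are
   hypothetical.  */
#if 0
/* scc form: one insn sets the result to 0 or 1 directly.  */
static int
example_scc_form (a, b)
     int a, b;
{
  return a < b;
}

/* set/jump/set form: preset the result, then branch around the
   alternative store when the comparison holds.  */
static int
example_set_jump_set_form (a, b)
     int a, b;
{
  int r = 1;
  if (a < b)
    goto done;
  r = 0;
 done:
  return r;
}
#endif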
10463
10464
10465/* Stubs in case we haven't got a casesi insn.  */
10466#ifndef HAVE_casesi
10467# define HAVE_casesi 0
10468# define gen_casesi(a, b, c, d, e) (0)
10469# define CODE_FOR_casesi CODE_FOR_nothing
10470#endif
10471
10472/* If the machine does not have a case insn that compares the bounds,
10473   this means extra overhead for dispatch tables, which raises the
10474   threshold for using them.  */
10475#ifndef CASE_VALUES_THRESHOLD
10476#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10477#endif /* CASE_VALUES_THRESHOLD */
10478
10479unsigned int
10480case_values_threshold ()
10481{
10482  return CASE_VALUES_THRESHOLD;
10483}
10484
10485/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10486   0 otherwise (i.e. if there is no casesi instruction).  */
10487int
10488try_casesi (index_type, index_expr, minval, range,
10489	    table_label, default_label)
10490     tree index_type, index_expr, minval, range;
10491     rtx table_label ATTRIBUTE_UNUSED;
10492     rtx default_label;
10493{
10494  enum machine_mode index_mode = SImode;
10495  int index_bits = GET_MODE_BITSIZE (index_mode);
10496  rtx op1, op2, index;
10497  enum machine_mode op_mode;
10498
10499  if (! HAVE_casesi)
10500    return 0;
10501
10502  /* Convert the index to SImode.  */
10503  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10504    {
10505      enum machine_mode omode = TYPE_MODE (index_type);
10506      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10507
10508      /* We must handle the endpoints in the original mode.  */
10509      index_expr = build (MINUS_EXPR, index_type,
10510			  index_expr, minval);
10511      minval = integer_zero_node;
10512      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10513      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10514			       omode, 1, default_label);
10515      /* Now we can safely truncate.  */
10516      index = convert_to_mode (index_mode, index, 0);
10517    }
10518  else
10519    {
10520      if (TYPE_MODE (index_type) != index_mode)
10521	{
10522	  index_expr = convert (type_for_size (index_bits, 0),
10523				index_expr);
10524	  index_type = TREE_TYPE (index_expr);
10525	}
10526
10527      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10528    }
10529  emit_queue ();
10530  index = protect_from_queue (index, 0);
10531  do_pending_stack_adjust ();
10532
10533  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10534  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10535      (index, op_mode))
10536    index = copy_to_mode_reg (op_mode, index);
10537
10538  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10539
10540  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10541  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10542		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10543  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10544      (op1, op_mode))
10545    op1 = copy_to_mode_reg (op_mode, op1);
10546
10547  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10548
10549  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10550  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10551		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
10552  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10553      (op2, op_mode))
10554    op2 = copy_to_mode_reg (op_mode, op2);
10555
10556  emit_jump_insn (gen_casesi (index, op1, op2,
10557			      table_label, default_label));
10558  return 1;
10559}
10560
10561/* Attempt to generate a tablejump instruction; same concept.  */
10562#ifndef HAVE_tablejump
10563#define HAVE_tablejump 0
10564#define gen_tablejump(x, y) (0)
10565#endif
10566
10567/* Subroutine of the next function.
10568
10569   INDEX is the value being switched on, with the lowest value
10570   in the table already subtracted.
10571   MODE is its expected mode (needed if INDEX is constant).
10572   RANGE is the length of the jump table.
10573   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10574
10575   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10576   index value is out of range.  */
10577
10578static void
10579do_tablejump (index, mode, range, table_label, default_label)
10580     rtx index, range, table_label, default_label;
10581     enum machine_mode mode;
10582{
10583  rtx temp, vector;
10584
10585  /* Do an unsigned comparison (in the proper mode) between the index
10586     expression and the value which represents the length of the range.
10587     Since we just finished subtracting the lower bound of the range
10588     from the index expression, this comparison allows us to simultaneously
10589     check that the original index expression value is both greater than
10590     or equal to the minimum value of the range and less than or equal to
10591     the maximum value of the range.  */
10592
10593  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10594			   default_label);
10595
10596  /* If INDEX is in range, it must fit in Pmode.
10597     Convert to Pmode so we can index with it.  */
10598  if (mode != Pmode)
10599    index = convert_to_mode (Pmode, index, 1);
10600
10601  /* Don't let a MEM slip through, because then the INDEX that comes
10602     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10603     and break_out_memory_refs will go to work on it and mess it up.  */
10604#ifdef PIC_CASE_VECTOR_ADDRESS
10605  if (flag_pic && GET_CODE (index) != REG)
10606    index = copy_to_mode_reg (Pmode, index);
10607#endif
10608
10609  /* If flag_force_addr were to affect this address
10610     it could interfere with the tricky assumptions made
10611     about addresses that contain label-refs,
10612     which may be valid only very near the tablejump itself.  */
10613  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10614     GET_MODE_SIZE, because this indicates how large insns are.  The other
10615     uses should all be Pmode, because they are addresses.  This code
10616     could fail if addresses and insns are not the same size.  */
10617  index = gen_rtx_PLUS (Pmode,
10618			gen_rtx_MULT (Pmode, index,
10619				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10620			gen_rtx_LABEL_REF (Pmode, table_label));
10621#ifdef PIC_CASE_VECTOR_ADDRESS
10622  if (flag_pic)
10623    index = PIC_CASE_VECTOR_ADDRESS (index);
10624  else
10625#endif
10626    index = memory_address_noforce (CASE_VECTOR_MODE, index);
10627  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10628  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10629  RTX_UNCHANGING_P (vector) = 1;
10630  convert_move (temp, vector, 0);
10631
10632  emit_jump_insn (gen_tablejump (temp, table_label));
10633
10634  /* If we are generating PIC code or if the table is PC-relative, the
10635     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10636  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10637    emit_barrier ();
10638}
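
/* Illustrative only, never compiled: the bounds check performed by
   do_tablejump above.  After the low bound of the case range has been
   subtracted from the index, a single unsigned comparison rejects
   values on either side of the range, because values below the low
   bound wrap around to very large unsigned numbers.  The constants
   below (cases 5 .. 12, so a range of 7) are hypothetical.  */
#if 0
static int
example_case_index_in_range (index)
     int index;
{
  return (unsigned int) (index - 5) <= 7;
}
#endif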
10639
10640int
10641try_tablejump (index_type, index_expr, minval, range,
10642	       table_label, default_label)
10643     tree index_type, index_expr, minval, range;
10644     rtx table_label, default_label;
10645{
10646  rtx index;
10647
10648  if (! HAVE_tablejump)
10649    return 0;
10650
10651  index_expr = fold (build (MINUS_EXPR, index_type,
10652			    convert (index_type, index_expr),
10653			    convert (index_type, minval)));
10654  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10655  emit_queue ();
10656  index = protect_from_queue (index, 0);
10657  do_pending_stack_adjust ();
10658
10659  do_tablejump (index, TYPE_MODE (index_type),
10660		convert_modes (TYPE_MODE (index_type),
10661			       TYPE_MODE (TREE_TYPE (range)),
10662			       expand_expr (range, NULL_RTX,
10663					    VOIDmode, 0),
10664			       TREE_UNSIGNED (TREE_TYPE (range))),
10665		table_label, default_label);
10666  return 1;
10667}
10668