expr.c revision 70635
1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21
22#include "config.h"
23#include "system.h"
24#include "machmode.h"
25#include "rtl.h"
26#include "tree.h"
27#include "obstack.h"
28#include "flags.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "except.h"
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
35#include "insn-config.h"
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
38#include "recog.h"
39#include "output.h"
40#include "typeclass.h"
41#include "defaults.h"
42#include "toplev.h"
43
44#define CEIL(x,y) (((x) + (y) - 1) / (y))
45
46/* Decide whether a function's arguments should be processed
47   from first to last or from last to first.
48
49   They should if the stack and args grow in opposite directions, but
50   only if we have push insns.  */
51
52#ifdef PUSH_ROUNDING
53
54#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55#define PUSH_ARGS_REVERSED	/* If it's last to first */
56#endif
57
58#endif
59
60#ifndef STACK_PUSH_CODE
61#ifdef STACK_GROWS_DOWNWARD
62#define STACK_PUSH_CODE PRE_DEC
63#else
64#define STACK_PUSH_CODE PRE_INC
65#endif
66#endif
67
68/* Assume that case vectors are not pc-relative.  */
69#ifndef CASE_VECTOR_PC_RELATIVE
70#define CASE_VECTOR_PC_RELATIVE 0
71#endif
72
73/* If this is nonzero, we do not bother generating VOLATILE
74   around volatile memory references, and we are willing to
75   output indirect addresses.  If cse is to follow, we reject
76   indirect addresses so a useful potential cse is generated;
77   if it is used only once, instruction combination will produce
78   the same indirect address eventually.  */
79int cse_not_expected;
80
81/* Nonzero to generate code for all the subroutines within an
82   expression before generating the upper levels of the expression.
83   Nowadays this is never zero.  */
84int do_preexpand_calls = 1;
85
86/* Number of units that we should eventually pop off the stack.
87   These are the arguments to function calls that have already returned.  */
88int pending_stack_adjust;
89
90/* Under some ABIs, it is the caller's responsibility to pop arguments
91   pushed for function calls.  A naive implementation would simply pop
92   the arguments immediately after each call.  However, if several
93   function calls are made in a row, it is typically cheaper to pop
94   all the arguments after all of the calls are complete since a
95   single pop instruction can be used.  Therefore, GCC attempts to
96   defer popping the arguments until absolutely necessary.  (For
97   example, at the end of a conditional, the arguments must be popped,
98   since code outside the conditional won't know whether or not the
99   arguments need to be popped.)
100
101   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
102   attempt to defer pops.  Instead, the stack is popped immediately
103   after each call.  Rather then setting this variable directly, use
104   NO_DEFER_POP and OK_DEFER_POP.  */
105int inhibit_defer_pop;
106
107/* Nonzero means __builtin_saveregs has already been done in this function.
108   The value is the pseudoreg containing the value __builtin_saveregs
109   returned.  */
110static rtx saveregs_value;
111
112/* Similarly for __builtin_apply_args.  */
113static rtx apply_args_value;
114
115/* Don't check memory usage, since code is being emitted to check a memory
116   usage.  Used when current_function_check_memory_usage is true, to avoid
117   infinite recursion.  */
118static int in_check_memory_usage;
119
120/* Postincrements that still need to be expanded.  */
121static rtx pending_chain;
122
123/* This structure is used by move_by_pieces to describe the move to
124   be performed.  */
125struct move_by_pieces
126{
127  rtx to;
128  rtx to_addr;
129  int autinc_to;
130  int explicit_inc_to;
131  int to_struct;
132  rtx from;
133  rtx from_addr;
134  int autinc_from;
135  int explicit_inc_from;
136  int from_struct;
137  int len;
138  int offset;
139  int reverse;
140};
141
142/* This structure is used by clear_by_pieces to describe the clear to
143   be performed.  */
144
145struct clear_by_pieces
146{
147  rtx to;
148  rtx to_addr;
149  int autinc_to;
150  int explicit_inc_to;
151  int to_struct;
152  int len;
153  int offset;
154  int reverse;
155};
156
157extern struct obstack permanent_obstack;
158extern rtx arg_pointer_save_area;
159
160static rtx get_push_address	PROTO ((int));
161
162static rtx enqueue_insn		PROTO((rtx, rtx));
163static void init_queue		PROTO((void));
164static int move_by_pieces_ninsns PROTO((unsigned int, int));
165static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
166				       struct move_by_pieces *));
167static void clear_by_pieces	PROTO((rtx, int, int));
168static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
169				       struct clear_by_pieces *));
170static int is_zeros_p		PROTO((tree));
171static int mostly_zeros_p	PROTO((tree));
172static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
173					   tree, tree, int));
174static void store_constructor	PROTO((tree, rtx, int));
175static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
176				       enum machine_mode, int, int,
177				       int, int));
178static enum memory_use_mode
179  get_memory_usage_from_modifier PROTO((enum expand_modifier));
180static tree save_noncopied_parts PROTO((tree, tree));
181static tree init_noncopied_parts PROTO((tree, tree));
182static int safe_from_p		PROTO((rtx, tree, int));
183static int fixed_type_p		PROTO((tree));
184static rtx var_rtx		PROTO((tree));
185static int get_pointer_alignment PROTO((tree, unsigned));
186static tree string_constant	PROTO((tree, tree *));
187static tree c_strlen		PROTO((tree));
188static rtx get_memory_rtx	PROTO((tree));
189static rtx expand_builtin	PROTO((tree, rtx, rtx,
190				       enum machine_mode, int));
191static int apply_args_size	PROTO((void));
192static int apply_result_size	PROTO((void));
193static rtx result_vector	PROTO((int, rtx));
194static rtx expand_builtin_setjmp PROTO((tree, rtx));
195static rtx expand_builtin_apply_args PROTO((void));
196static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
197static void expand_builtin_return PROTO((rtx));
198static rtx expand_increment	PROTO((tree, int, int));
199static void preexpand_calls	PROTO((tree));
200static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
201static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
202static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
203static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
204static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
205
206/* Record for each mode whether we can move a register directly to or
207   from an object of that mode in memory.  If we can't, we won't try
208   to use that mode directly when accessing a field of that mode.  */
209
210static char direct_load[NUM_MACHINE_MODES];
211static char direct_store[NUM_MACHINE_MODES];
212
213/* If a memory-to-memory move would take MOVE_RATIO or more simple
214   move-instruction sequences, we will do a movstr or libcall instead.  */
215
216#ifndef MOVE_RATIO
217#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
218#define MOVE_RATIO 2
219#else
220/* If we are optimizing for space (-Os), cut down the default move ratio */
221#define MOVE_RATIO (optimize_size ? 3 : 15)
222#endif
223#endif
224
225/* This macro is used to determine whether move_by_pieces should be called
226   to perform a structure copy. */
227#ifndef MOVE_BY_PIECES_P
228#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
229                                       (SIZE, ALIGN) < MOVE_RATIO)
230#endif
231
232/* This array records the insn_code of insns to perform block moves.  */
233enum insn_code movstr_optab[NUM_MACHINE_MODES];
234
235/* This array records the insn_code of insns to perform block clears.  */
236enum insn_code clrstr_optab[NUM_MACHINE_MODES];
237
238/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */
239
240#ifndef SLOW_UNALIGNED_ACCESS
241#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
242#endif
243
244/* Register mappings for target machines without register windows.  */
245#ifndef INCOMING_REGNO
246#define INCOMING_REGNO(OUT) (OUT)
247#endif
248#ifndef OUTGOING_REGNO
249#define OUTGOING_REGNO(IN) (IN)
250#endif
251
252/* This is run once per compilation to set up which modes can be used
253   directly in memory and to initialize the block move optab.  */
254
255void
256init_expr_once ()
257{
258  rtx insn, pat;
259  enum machine_mode mode;
260  int num_clobbers;
261  rtx mem, mem1;
262  char *free_point;
263
264  start_sequence ();
265
266  /* Since we are on the permanent obstack, we must be sure we save this
267     spot AFTER we call start_sequence, since it will reuse the rtl it
268     makes.  */
269  free_point = (char *) oballoc (0);
270
271  /* Try indexing by frame ptr and try by stack ptr.
272     It is known that on the Convex the stack ptr isn't a valid index.
273     With luck, one or the other is valid on any machine.  */
274  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
275  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
276
277  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
278  pat = PATTERN (insn);
279
280  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
281       mode = (enum machine_mode) ((int) mode + 1))
282    {
283      int regno;
284      rtx reg;
285
286      direct_load[(int) mode] = direct_store[(int) mode] = 0;
287      PUT_MODE (mem, mode);
288      PUT_MODE (mem1, mode);
289
290      /* See if there is some register that can be used in this mode and
291	 directly loaded or stored from memory.  */
292
293      if (mode != VOIDmode && mode != BLKmode)
294	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
295	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
296	     regno++)
297	  {
298	    if (! HARD_REGNO_MODE_OK (regno, mode))
299	      continue;
300
301	    reg = gen_rtx_REG (mode, regno);
302
303	    SET_SRC (pat) = mem;
304	    SET_DEST (pat) = reg;
305	    if (recog (pat, insn, &num_clobbers) >= 0)
306	      direct_load[(int) mode] = 1;
307
308	    SET_SRC (pat) = mem1;
309	    SET_DEST (pat) = reg;
310	    if (recog (pat, insn, &num_clobbers) >= 0)
311	      direct_load[(int) mode] = 1;
312
313	    SET_SRC (pat) = reg;
314	    SET_DEST (pat) = mem;
315	    if (recog (pat, insn, &num_clobbers) >= 0)
316	      direct_store[(int) mode] = 1;
317
318	    SET_SRC (pat) = reg;
319	    SET_DEST (pat) = mem1;
320	    if (recog (pat, insn, &num_clobbers) >= 0)
321	      direct_store[(int) mode] = 1;
322	  }
323    }
324
325  end_sequence ();
326  obfree (free_point);
327}
328
329/* This is run at the start of compiling a function.  */
330
331void
332init_expr ()
333{
334  init_queue ();
335
336  pending_stack_adjust = 0;
337  inhibit_defer_pop = 0;
338  saveregs_value = 0;
339  apply_args_value = 0;
340  forced_labels = 0;
341}
342
343/* Save all variables describing the current status into the structure *P.
344   This is used before starting a nested function.  */
345
346void
347save_expr_status (p)
348     struct function *p;
349{
350  p->pending_chain = pending_chain;
351  p->pending_stack_adjust = pending_stack_adjust;
352  p->inhibit_defer_pop = inhibit_defer_pop;
353  p->saveregs_value = saveregs_value;
354  p->apply_args_value = apply_args_value;
355  p->forced_labels = forced_labels;
356
357  pending_chain = NULL_RTX;
358  pending_stack_adjust = 0;
359  inhibit_defer_pop = 0;
360  saveregs_value = 0;
361  apply_args_value = 0;
362  forced_labels = 0;
363}
364
365/* Restore all variables describing the current status from the structure *P.
366   This is used after a nested function.  */
367
368void
369restore_expr_status (p)
370     struct function *p;
371{
372  pending_chain = p->pending_chain;
373  pending_stack_adjust = p->pending_stack_adjust;
374  inhibit_defer_pop = p->inhibit_defer_pop;
375  saveregs_value = p->saveregs_value;
376  apply_args_value = p->apply_args_value;
377  forced_labels = p->forced_labels;
378}
379
380/* Manage the queue of increment instructions to be output
381   for POSTINCREMENT_EXPR expressions, etc.  */
382
383/* Queue up to increment (or change) VAR later.  BODY says how:
384   BODY should be the same thing you would pass to emit_insn
385   to increment right away.  It will go to emit_insn later on.
386
387   The value is a QUEUED expression to be used in place of VAR
388   where you want to guarantee the pre-incrementation value of VAR.  */
389
390static rtx
391enqueue_insn (var, body)
392     rtx var, body;
393{
394  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
395				  var, NULL_RTX, NULL_RTX, body,
396				  pending_chain);
397  return pending_chain;
398}
399
400/* Use protect_from_queue to convert a QUEUED expression
401   into something that you can put immediately into an instruction.
402   If the queued incrementation has not happened yet,
403   protect_from_queue returns the variable itself.
404   If the incrementation has happened, protect_from_queue returns a temp
405   that contains a copy of the old value of the variable.
406
407   Any time an rtx which might possibly be a QUEUED is to be put
408   into an instruction, it must be passed through protect_from_queue first.
409   QUEUED expressions are not meaningful in instructions.
410
411   Do not pass a value through protect_from_queue and then hold
412   on to it for a while before putting it in an instruction!
413   If the queue is flushed in between, incorrect code will result.  */
414
415rtx
416protect_from_queue (x, modify)
417     register rtx x;
418     int modify;
419{
420  register RTX_CODE code = GET_CODE (x);
421
422#if 0  /* A QUEUED can hang around after the queue is forced out.  */
423  /* Shortcut for most common case.  */
424  if (pending_chain == 0)
425    return x;
426#endif
427
428  if (code != QUEUED)
429    {
430      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
431	 use of autoincrement.  Make a copy of the contents of the memory
432	 location rather than a copy of the address, but not if the value is
433	 of mode BLKmode.  Don't modify X in place since it might be
434	 shared.  */
435      if (code == MEM && GET_MODE (x) != BLKmode
436	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
437	{
438	  register rtx y = XEXP (x, 0);
439	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
440
441	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
442	  MEM_COPY_ATTRIBUTES (new, x);
443	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
444
445	  if (QUEUED_INSN (y))
446	    {
447	      register rtx temp = gen_reg_rtx (GET_MODE (new));
448	      emit_insn_before (gen_move_insn (temp, new),
449				QUEUED_INSN (y));
450	      return temp;
451	    }
452	  return new;
453	}
454      /* Otherwise, recursively protect the subexpressions of all
455	 the kinds of rtx's that can contain a QUEUED.  */
456      if (code == MEM)
457	{
458	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
459	  if (tem != XEXP (x, 0))
460	    {
461	      x = copy_rtx (x);
462	      XEXP (x, 0) = tem;
463	    }
464	}
465      else if (code == PLUS || code == MULT)
466	{
467	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
468	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
469	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
470	    {
471	      x = copy_rtx (x);
472	      XEXP (x, 0) = new0;
473	      XEXP (x, 1) = new1;
474	    }
475	}
476      return x;
477    }
478  /* If the increment has not happened, use the variable itself.  */
479  if (QUEUED_INSN (x) == 0)
480    return QUEUED_VAR (x);
481  /* If the increment has happened and a pre-increment copy exists,
482     use that copy.  */
483  if (QUEUED_COPY (x) != 0)
484    return QUEUED_COPY (x);
485  /* The increment has happened but we haven't set up a pre-increment copy.
486     Set one up now, and use it.  */
487  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
488  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
489		    QUEUED_INSN (x));
490  return QUEUED_COPY (x);
491}
492
493/* Return nonzero if X contains a QUEUED expression:
494   if it contains anything that will be altered by a queued increment.
495   We handle only combinations of MEM, PLUS, MINUS and MULT operators
496   since memory addresses generally contain only those.  */
497
498int
499queued_subexp_p (x)
500     rtx x;
501{
502  register enum rtx_code code = GET_CODE (x);
503  switch (code)
504    {
505    case QUEUED:
506      return 1;
507    case MEM:
508      return queued_subexp_p (XEXP (x, 0));
509    case MULT:
510    case PLUS:
511    case MINUS:
512      return (queued_subexp_p (XEXP (x, 0))
513	      || queued_subexp_p (XEXP (x, 1)));
514    default:
515      return 0;
516    }
517}
518
519/* Perform all the pending incrementations.  */
520
521void
522emit_queue ()
523{
524  register rtx p;
525  while ((p = pending_chain))
526    {
527      rtx body = QUEUED_BODY (p);
528
529      if (GET_CODE (body) == SEQUENCE)
530	{
531	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
532	  emit_insn (QUEUED_BODY (p));
533	}
534      else
535	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
536      pending_chain = QUEUED_NEXT (p);
537    }
538}
539
540static void
541init_queue ()
542{
543  if (pending_chain)
544    abort ();
545}
546
547/* Copy data from FROM to TO, where the machine modes are not the same.
548   Both modes may be integer, or both may be floating.
549   UNSIGNEDP should be nonzero if FROM is an unsigned type.
550   This causes zero-extension instead of sign-extension.  */
551
552void
553convert_move (to, from, unsignedp)
554     register rtx to, from;
555     int unsignedp;
556{
557  enum machine_mode to_mode = GET_MODE (to);
558  enum machine_mode from_mode = GET_MODE (from);
559  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
560  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
561  enum insn_code code;
562  rtx libcall;
563
564  /* rtx code for making an equivalent value.  */
565  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
566
567  to = protect_from_queue (to, 1);
568  from = protect_from_queue (from, 0);
569
570  if (to_real != from_real)
571    abort ();
572
573  /* If FROM is a SUBREG that indicates that we have already done at least
574     the required extension, strip it.  We don't handle such SUBREGs as
575     TO here.  */
576
577  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
578      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
579	  >= GET_MODE_SIZE (to_mode))
580      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
581    from = gen_lowpart (to_mode, from), from_mode = to_mode;
582
583  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
584    abort ();
585
586  if (to_mode == from_mode
587      || (from_mode == VOIDmode && CONSTANT_P (from)))
588    {
589      emit_move_insn (to, from);
590      return;
591    }
592
593  if (to_real)
594    {
595      rtx value;
596
597      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
598	{
599	  /* Try converting directly if the insn is supported.  */
600	  if ((code = can_extend_p (to_mode, from_mode, 0))
601	      != CODE_FOR_nothing)
602	    {
603	      emit_unop_insn (code, to, from, UNKNOWN);
604	      return;
605	    }
606	}
607
608#ifdef HAVE_trunchfqf2
609      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
610	{
611	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
612	  return;
613	}
614#endif
615#ifdef HAVE_trunctqfqf2
616      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
617	{
618	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
619	  return;
620	}
621#endif
622#ifdef HAVE_truncsfqf2
623      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
624	{
625	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
626	  return;
627	}
628#endif
629#ifdef HAVE_truncdfqf2
630      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
631	{
632	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
633	  return;
634	}
635#endif
636#ifdef HAVE_truncxfqf2
637      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
638	{
639	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
640	  return;
641	}
642#endif
643#ifdef HAVE_trunctfqf2
644      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
645	{
646	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
647	  return;
648	}
649#endif
650
651#ifdef HAVE_trunctqfhf2
652      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
653	{
654	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
655	  return;
656	}
657#endif
658#ifdef HAVE_truncsfhf2
659      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
660	{
661	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
662	  return;
663	}
664#endif
665#ifdef HAVE_truncdfhf2
666      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
667	{
668	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
669	  return;
670	}
671#endif
672#ifdef HAVE_truncxfhf2
673      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
674	{
675	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
676	  return;
677	}
678#endif
679#ifdef HAVE_trunctfhf2
680      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
681	{
682	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
683	  return;
684	}
685#endif
686
687#ifdef HAVE_truncsftqf2
688      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
689	{
690	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
691	  return;
692	}
693#endif
694#ifdef HAVE_truncdftqf2
695      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
696	{
697	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
698	  return;
699	}
700#endif
701#ifdef HAVE_truncxftqf2
702      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
703	{
704	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
705	  return;
706	}
707#endif
708#ifdef HAVE_trunctftqf2
709      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
710	{
711	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
712	  return;
713	}
714#endif
715
716#ifdef HAVE_truncdfsf2
717      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
718	{
719	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
720	  return;
721	}
722#endif
723#ifdef HAVE_truncxfsf2
724      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
725	{
726	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
727	  return;
728	}
729#endif
730#ifdef HAVE_trunctfsf2
731      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
732	{
733	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
734	  return;
735	}
736#endif
737#ifdef HAVE_truncxfdf2
738      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
739	{
740	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
741	  return;
742	}
743#endif
744#ifdef HAVE_trunctfdf2
745      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
746	{
747	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
748	  return;
749	}
750#endif
751
752      libcall = (rtx) 0;
753      switch (from_mode)
754	{
755	case SFmode:
756	  switch (to_mode)
757	    {
758	    case DFmode:
759	      libcall = extendsfdf2_libfunc;
760	      break;
761
762	    case XFmode:
763	      libcall = extendsfxf2_libfunc;
764	      break;
765
766	    case TFmode:
767	      libcall = extendsftf2_libfunc;
768	      break;
769
770	    default:
771	      break;
772	    }
773	  break;
774
775	case DFmode:
776	  switch (to_mode)
777	    {
778	    case SFmode:
779	      libcall = truncdfsf2_libfunc;
780	      break;
781
782	    case XFmode:
783	      libcall = extenddfxf2_libfunc;
784	      break;
785
786	    case TFmode:
787	      libcall = extenddftf2_libfunc;
788	      break;
789
790	    default:
791	      break;
792	    }
793	  break;
794
795	case XFmode:
796	  switch (to_mode)
797	    {
798	    case SFmode:
799	      libcall = truncxfsf2_libfunc;
800	      break;
801
802	    case DFmode:
803	      libcall = truncxfdf2_libfunc;
804	      break;
805
806	    default:
807	      break;
808	    }
809	  break;
810
811	case TFmode:
812	  switch (to_mode)
813	    {
814	    case SFmode:
815	      libcall = trunctfsf2_libfunc;
816	      break;
817
818	    case DFmode:
819	      libcall = trunctfdf2_libfunc;
820	      break;
821
822	    default:
823	      break;
824	    }
825	  break;
826
827	default:
828	  break;
829	}
830
831      if (libcall == (rtx) 0)
832	/* This conversion is not implemented yet.  */
833	abort ();
834
835      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
836				       1, from, from_mode);
837      emit_move_insn (to, value);
838      return;
839    }
840
841  /* Now both modes are integers.  */
842
843  /* Handle expanding beyond a word.  */
844  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
845      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
846    {
847      rtx insns;
848      rtx lowpart;
849      rtx fill_value;
850      rtx lowfrom;
851      int i;
852      enum machine_mode lowpart_mode;
853      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
854
855      /* Try converting directly if the insn is supported.  */
856      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
857	  != CODE_FOR_nothing)
858	{
859	  /* If FROM is a SUBREG, put it into a register.  Do this
860	     so that we always generate the same set of insns for
861	     better cse'ing; if an intermediate assignment occurred,
862	     we won't be doing the operation directly on the SUBREG.  */
863	  if (optimize > 0 && GET_CODE (from) == SUBREG)
864	    from = force_reg (from_mode, from);
865	  emit_unop_insn (code, to, from, equiv_code);
866	  return;
867	}
868      /* Next, try converting via full word.  */
869      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
870	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
871		   != CODE_FOR_nothing))
872	{
873	  if (GET_CODE (to) == REG)
874	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
875	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
876	  emit_unop_insn (code, to,
877			  gen_lowpart (word_mode, to), equiv_code);
878	  return;
879	}
880
881      /* No special multiword conversion insn; do it by hand.  */
882      start_sequence ();
883
884      /* Since we will turn this into a no conflict block, we must ensure
885	 that the source does not overlap the target.  */
886
887      if (reg_overlap_mentioned_p (to, from))
888	from = force_reg (from_mode, from);
889
890      /* Get a copy of FROM widened to a word, if necessary.  */
891      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
892	lowpart_mode = word_mode;
893      else
894	lowpart_mode = from_mode;
895
896      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
897
898      lowpart = gen_lowpart (lowpart_mode, to);
899      emit_move_insn (lowpart, lowfrom);
900
901      /* Compute the value to put in each remaining word.  */
902      if (unsignedp)
903	fill_value = const0_rtx;
904      else
905	{
906#ifdef HAVE_slt
907	  if (HAVE_slt
908	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
909	      && STORE_FLAG_VALUE == -1)
910	    {
911	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
912			     lowpart_mode, 0, 0);
913	      fill_value = gen_reg_rtx (word_mode);
914	      emit_insn (gen_slt (fill_value));
915	    }
916	  else
917#endif
918	    {
919	      fill_value
920		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
921				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
922				NULL_RTX, 0);
923	      fill_value = convert_to_mode (word_mode, fill_value, 1);
924	    }
925	}
926
927      /* Fill the remaining words.  */
928      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
929	{
930	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
931	  rtx subword = operand_subword (to, index, 1, to_mode);
932
933	  if (subword == 0)
934	    abort ();
935
936	  if (fill_value != subword)
937	    emit_move_insn (subword, fill_value);
938	}
939
940      insns = get_insns ();
941      end_sequence ();
942
943      emit_no_conflict_block (insns, to, from, NULL_RTX,
944			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
945      return;
946    }
947
948  /* Truncating multi-word to a word or less.  */
949  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
950      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
951    {
952      if (!((GET_CODE (from) == MEM
953	     && ! MEM_VOLATILE_P (from)
954	     && direct_load[(int) to_mode]
955	     && ! mode_dependent_address_p (XEXP (from, 0)))
956	    || GET_CODE (from) == REG
957	    || GET_CODE (from) == SUBREG))
958	from = force_reg (from_mode, from);
959      convert_move (to, gen_lowpart (word_mode, from), 0);
960      return;
961    }
962
963  /* Handle pointer conversion */			/* SPEE 900220 */
964  if (to_mode == PQImode)
965    {
966      if (from_mode != QImode)
967	from = convert_to_mode (QImode, from, unsignedp);
968
969#ifdef HAVE_truncqipqi2
970      if (HAVE_truncqipqi2)
971	{
972	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
973	  return;
974	}
975#endif /* HAVE_truncqipqi2 */
976      abort ();
977    }
978
979  if (from_mode == PQImode)
980    {
981      if (to_mode != QImode)
982	{
983	  from = convert_to_mode (QImode, from, unsignedp);
984	  from_mode = QImode;
985	}
986      else
987	{
988#ifdef HAVE_extendpqiqi2
989	  if (HAVE_extendpqiqi2)
990	    {
991	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
992	      return;
993	    }
994#endif /* HAVE_extendpqiqi2 */
995	  abort ();
996	}
997    }
998
999  if (to_mode == PSImode)
1000    {
1001      if (from_mode != SImode)
1002	from = convert_to_mode (SImode, from, unsignedp);
1003
1004#ifdef HAVE_truncsipsi2
1005      if (HAVE_truncsipsi2)
1006	{
1007	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1008	  return;
1009	}
1010#endif /* HAVE_truncsipsi2 */
1011      abort ();
1012    }
1013
1014  if (from_mode == PSImode)
1015    {
1016      if (to_mode != SImode)
1017	{
1018	  from = convert_to_mode (SImode, from, unsignedp);
1019	  from_mode = SImode;
1020	}
1021      else
1022	{
1023#ifdef HAVE_extendpsisi2
1024	  if (HAVE_extendpsisi2)
1025	    {
1026	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1027	      return;
1028	    }
1029#endif /* HAVE_extendpsisi2 */
1030	  abort ();
1031	}
1032    }
1033
1034  if (to_mode == PDImode)
1035    {
1036      if (from_mode != DImode)
1037	from = convert_to_mode (DImode, from, unsignedp);
1038
1039#ifdef HAVE_truncdipdi2
1040      if (HAVE_truncdipdi2)
1041	{
1042	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1043	  return;
1044	}
1045#endif /* HAVE_truncdipdi2 */
1046      abort ();
1047    }
1048
1049  if (from_mode == PDImode)
1050    {
1051      if (to_mode != DImode)
1052	{
1053	  from = convert_to_mode (DImode, from, unsignedp);
1054	  from_mode = DImode;
1055	}
1056      else
1057	{
1058#ifdef HAVE_extendpdidi2
1059	  if (HAVE_extendpdidi2)
1060	    {
1061	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1062	      return;
1063	    }
1064#endif /* HAVE_extendpdidi2 */
1065	  abort ();
1066	}
1067    }
1068
1069  /* Now follow all the conversions between integers
1070     no more than a word long.  */
1071
1072  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
1073  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1074      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1075				GET_MODE_BITSIZE (from_mode)))
1076    {
1077      if (!((GET_CODE (from) == MEM
1078	     && ! MEM_VOLATILE_P (from)
1079	     && direct_load[(int) to_mode]
1080	     && ! mode_dependent_address_p (XEXP (from, 0)))
1081	    || GET_CODE (from) == REG
1082	    || GET_CODE (from) == SUBREG))
1083	from = force_reg (from_mode, from);
1084      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1085	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1086	from = copy_to_reg (from);
1087      emit_move_insn (to, gen_lowpart (to_mode, from));
1088      return;
1089    }
1090
1091  /* Handle extension.  */
1092  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1093    {
1094      /* Convert directly if that works.  */
1095      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1096	  != CODE_FOR_nothing)
1097	{
1098	  emit_unop_insn (code, to, from, equiv_code);
1099	  return;
1100	}
1101      else
1102	{
1103	  enum machine_mode intermediate;
1104	  rtx tmp;
1105	  tree shift_amount;
1106
1107	  /* Search for a mode to convert via.  */
1108	  for (intermediate = from_mode; intermediate != VOIDmode;
1109	       intermediate = GET_MODE_WIDER_MODE (intermediate))
1110	    if (((can_extend_p (to_mode, intermediate, unsignedp)
1111		  != CODE_FOR_nothing)
1112		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1113		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1114		&& (can_extend_p (intermediate, from_mode, unsignedp)
1115		    != CODE_FOR_nothing))
1116	      {
1117		convert_move (to, convert_to_mode (intermediate, from,
1118						   unsignedp), unsignedp);
1119		return;
1120	      }
1121
1122	  /* No suitable intermediate mode.
1123	     Generate what we need with	shifts. */
1124	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1125				      - GET_MODE_BITSIZE (from_mode), 0);
1126	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
1127	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1128			      to, unsignedp);
1129	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp,  shift_amount,
1130			      to, unsignedp);
1131	  if (tmp != to)
1132	    emit_move_insn (to, tmp);
1133	  return;
1134	}
1135    }
1136
1137  /* Support special truncate insns for certain modes.  */
1138
1139  if (from_mode == DImode && to_mode == SImode)
1140    {
1141#ifdef HAVE_truncdisi2
1142      if (HAVE_truncdisi2)
1143	{
1144	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1145	  return;
1146	}
1147#endif
1148      convert_move (to, force_reg (from_mode, from), unsignedp);
1149      return;
1150    }
1151
1152  if (from_mode == DImode && to_mode == HImode)
1153    {
1154#ifdef HAVE_truncdihi2
1155      if (HAVE_truncdihi2)
1156	{
1157	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1158	  return;
1159	}
1160#endif
1161      convert_move (to, force_reg (from_mode, from), unsignedp);
1162      return;
1163    }
1164
1165  if (from_mode == DImode && to_mode == QImode)
1166    {
1167#ifdef HAVE_truncdiqi2
1168      if (HAVE_truncdiqi2)
1169	{
1170	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1171	  return;
1172	}
1173#endif
1174      convert_move (to, force_reg (from_mode, from), unsignedp);
1175      return;
1176    }
1177
1178  if (from_mode == SImode && to_mode == HImode)
1179    {
1180#ifdef HAVE_truncsihi2
1181      if (HAVE_truncsihi2)
1182	{
1183	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1184	  return;
1185	}
1186#endif
1187      convert_move (to, force_reg (from_mode, from), unsignedp);
1188      return;
1189    }
1190
1191  if (from_mode == SImode && to_mode == QImode)
1192    {
1193#ifdef HAVE_truncsiqi2
1194      if (HAVE_truncsiqi2)
1195	{
1196	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1197	  return;
1198	}
1199#endif
1200      convert_move (to, force_reg (from_mode, from), unsignedp);
1201      return;
1202    }
1203
1204  if (from_mode == HImode && to_mode == QImode)
1205    {
1206#ifdef HAVE_trunchiqi2
1207      if (HAVE_trunchiqi2)
1208	{
1209	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1210	  return;
1211	}
1212#endif
1213      convert_move (to, force_reg (from_mode, from), unsignedp);
1214      return;
1215    }
1216
1217  if (from_mode == TImode && to_mode == DImode)
1218    {
1219#ifdef HAVE_trunctidi2
1220      if (HAVE_trunctidi2)
1221	{
1222	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1223	  return;
1224	}
1225#endif
1226      convert_move (to, force_reg (from_mode, from), unsignedp);
1227      return;
1228    }
1229
1230  if (from_mode == TImode && to_mode == SImode)
1231    {
1232#ifdef HAVE_trunctisi2
1233      if (HAVE_trunctisi2)
1234	{
1235	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1236	  return;
1237	}
1238#endif
1239      convert_move (to, force_reg (from_mode, from), unsignedp);
1240      return;
1241    }
1242
1243  if (from_mode == TImode && to_mode == HImode)
1244    {
1245#ifdef HAVE_trunctihi2
1246      if (HAVE_trunctihi2)
1247	{
1248	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1249	  return;
1250	}
1251#endif
1252      convert_move (to, force_reg (from_mode, from), unsignedp);
1253      return;
1254    }
1255
1256  if (from_mode == TImode && to_mode == QImode)
1257    {
1258#ifdef HAVE_trunctiqi2
1259      if (HAVE_trunctiqi2)
1260	{
1261	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1262	  return;
1263	}
1264#endif
1265      convert_move (to, force_reg (from_mode, from), unsignedp);
1266      return;
1267    }
1268
1269  /* Handle truncation of volatile memrefs, and so on;
1270     the things that couldn't be truncated directly,
1271     and for which there was no special instruction.  */
1272  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1273    {
1274      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1275      emit_move_insn (to, temp);
1276      return;
1277    }
1278
1279  /* Mode combination is not recognized.  */
1280  abort ();
1281}
1282
1283/* Return an rtx for a value that would result
1284   from converting X to mode MODE.
1285   Both X and MODE may be floating, or both integer.
1286   UNSIGNEDP is nonzero if X is an unsigned value.
1287   This can be done by referring to a part of X in place
1288   or by copying to a new temporary with conversion.
1289
1290   This function *must not* call protect_from_queue
1291   except when putting X into an insn (in which case convert_move does it).  */
1292
1293rtx
1294convert_to_mode (mode, x, unsignedp)
1295     enum machine_mode mode;
1296     rtx x;
1297     int unsignedp;
1298{
1299  return convert_modes (mode, VOIDmode, x, unsignedp);
1300}
1301
1302/* Return an rtx for a value that would result
1303   from converting X from mode OLDMODE to mode MODE.
1304   Both modes may be floating, or both integer.
1305   UNSIGNEDP is nonzero if X is an unsigned value.
1306
1307   This can be done by referring to a part of X in place
1308   or by copying to a new temporary with conversion.
1309
1310   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1311
1312   This function *must not* call protect_from_queue
1313   except when putting X into an insn (in which case convert_move does it).  */
1314
1315rtx
1316convert_modes (mode, oldmode, x, unsignedp)
1317     enum machine_mode mode, oldmode;
1318     rtx x;
1319     int unsignedp;
1320{
1321  register rtx temp;
1322
1323  /* If FROM is a SUBREG that indicates that we have already done at least
1324     the required extension, strip it.  */
1325
1326  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1327      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1328      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1329    x = gen_lowpart (mode, x);
1330
1331  if (GET_MODE (x) != VOIDmode)
1332    oldmode = GET_MODE (x);
1333
1334  if (mode == oldmode)
1335    return x;
1336
1337  /* There is one case that we must handle specially: If we are converting
1338     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1339     we are to interpret the constant as unsigned, gen_lowpart will do
1340     the wrong if the constant appears negative.  What we want to do is
1341     make the high-order word of the constant zero, not all ones.  */
1342
1343  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1344      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1345      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1346    {
1347      HOST_WIDE_INT val = INTVAL (x);
1348
1349      if (oldmode != VOIDmode
1350	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1351	{
1352	  int width = GET_MODE_BITSIZE (oldmode);
1353
1354	  /* We need to zero extend VAL.  */
1355	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1356	}
1357
1358      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1359    }
1360
1361  /* We can do this with a gen_lowpart if both desired and current modes
1362     are integer, and this is either a constant integer, a register, or a
1363     non-volatile MEM.  Except for the constant case where MODE is no
1364     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
1365
1366  if ((GET_CODE (x) == CONST_INT
1367       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1368      || (GET_MODE_CLASS (mode) == MODE_INT
1369	  && GET_MODE_CLASS (oldmode) == MODE_INT
1370	  && (GET_CODE (x) == CONST_DOUBLE
1371	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1372		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1373		       && direct_load[(int) mode])
1374		      || (GET_CODE (x) == REG
1375			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1376						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
1377    {
1378      /* ?? If we don't know OLDMODE, we have to assume here that
1379	 X does not need sign- or zero-extension.   This may not be
1380	 the case, but it's the best we can do.  */
1381      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1382	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1383	{
1384	  HOST_WIDE_INT val = INTVAL (x);
1385	  int width = GET_MODE_BITSIZE (oldmode);
1386
1387	  /* We must sign or zero-extend in this case.  Start by
1388	     zero-extending, then sign extend if we need to.  */
1389	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
1390	  if (! unsignedp
1391	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1392	    val |= (HOST_WIDE_INT) (-1) << width;
1393
1394	  return GEN_INT (val);
1395	}
1396
1397      return gen_lowpart (mode, x);
1398    }
1399
1400  temp = gen_reg_rtx (mode);
1401  convert_move (temp, x, unsignedp);
1402  return temp;
1403}
1404
1405
1406/* This macro is used to determine what the largest unit size that
1407   move_by_pieces can use is. */
1408
1409/* MOVE_MAX_PIECES is the number of bytes at a time which we can
1410   move efficiently, as opposed to  MOVE_MAX which is the maximum
1411   number of bhytes we can move with a single instruction. */
1412
1413#ifndef MOVE_MAX_PIECES
1414#define MOVE_MAX_PIECES   MOVE_MAX
1415#endif
1416
1417/* Generate several move instructions to copy LEN bytes
1418   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
1419   The caller must pass FROM and TO
1420    through protect_from_queue before calling.
1421   ALIGN (in bytes) is maximum alignment we can assume.  */
1422
1423void
1424move_by_pieces (to, from, len, align)
1425     rtx to, from;
1426     int len, align;
1427{
1428  struct move_by_pieces data;
1429  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1430  int max_size = MOVE_MAX_PIECES + 1;
1431  enum machine_mode mode = VOIDmode, tmode;
1432  enum insn_code icode;
1433
1434  data.offset = 0;
1435  data.to_addr = to_addr;
1436  data.from_addr = from_addr;
1437  data.to = to;
1438  data.from = from;
1439  data.autinc_to
1440    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1441       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1442  data.autinc_from
1443    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1444       || GET_CODE (from_addr) == POST_INC
1445       || GET_CODE (from_addr) == POST_DEC);
1446
1447  data.explicit_inc_from = 0;
1448  data.explicit_inc_to = 0;
1449  data.reverse
1450    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1451  if (data.reverse) data.offset = len;
1452  data.len = len;
1453
1454  data.to_struct = MEM_IN_STRUCT_P (to);
1455  data.from_struct = MEM_IN_STRUCT_P (from);
1456
1457  /* If copying requires more than two move insns,
1458     copy addresses to registers (to make displacements shorter)
1459     and use post-increment if available.  */
1460  if (!(data.autinc_from && data.autinc_to)
1461      && move_by_pieces_ninsns (len, align) > 2)
1462    {
1463      /* Find the mode of the largest move... */
1464      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1465	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1466	if (GET_MODE_SIZE (tmode) < max_size)
1467	  mode = tmode;
1468
1469      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1470	{
1471	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1472	  data.autinc_from = 1;
1473	  data.explicit_inc_from = -1;
1474	}
1475      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1476	{
1477	  data.from_addr = copy_addr_to_reg (from_addr);
1478	  data.autinc_from = 1;
1479	  data.explicit_inc_from = 1;
1480	}
1481      if (!data.autinc_from && CONSTANT_P (from_addr))
1482	data.from_addr = copy_addr_to_reg (from_addr);
1483      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1484	{
1485	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1486	  data.autinc_to = 1;
1487	  data.explicit_inc_to = -1;
1488	}
1489      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1490	{
1491	  data.to_addr = copy_addr_to_reg (to_addr);
1492	  data.autinc_to = 1;
1493	  data.explicit_inc_to = 1;
1494	}
1495      if (!data.autinc_to && CONSTANT_P (to_addr))
1496	data.to_addr = copy_addr_to_reg (to_addr);
1497    }
1498
1499  if (! SLOW_UNALIGNED_ACCESS
1500      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1501    align = MOVE_MAX;
1502
1503  /* First move what we can in the largest integer mode, then go to
1504     successively smaller modes.  */
1505
1506  while (max_size > 1)
1507    {
1508      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1509	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1510	if (GET_MODE_SIZE (tmode) < max_size)
1511	  mode = tmode;
1512
1513      if (mode == VOIDmode)
1514	break;
1515
1516      icode = mov_optab->handlers[(int) mode].insn_code;
1517      if (icode != CODE_FOR_nothing
1518	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1519			   GET_MODE_SIZE (mode)))
1520	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1521
1522      max_size = GET_MODE_SIZE (mode);
1523    }
1524
1525  /* The code above should have handled everything.  */
1526  if (data.len > 0)
1527    abort ();
1528}
1529
1530/* Return number of insns required to move L bytes by pieces.
1531   ALIGN (in bytes) is maximum alignment we can assume.  */
1532
1533static int
1534move_by_pieces_ninsns (l, align)
1535     unsigned int l;
1536     int align;
1537{
1538  register int n_insns = 0;
1539  int max_size = MOVE_MAX + 1;
1540
1541  if (! SLOW_UNALIGNED_ACCESS
1542      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1543    align = MOVE_MAX;
1544
1545  while (max_size > 1)
1546    {
1547      enum machine_mode mode = VOIDmode, tmode;
1548      enum insn_code icode;
1549
1550      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1551	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1552	if (GET_MODE_SIZE (tmode) < max_size)
1553	  mode = tmode;
1554
1555      if (mode == VOIDmode)
1556	break;
1557
1558      icode = mov_optab->handlers[(int) mode].insn_code;
1559      if (icode != CODE_FOR_nothing
1560	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1561			   GET_MODE_SIZE (mode)))
1562	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1563
1564      max_size = GET_MODE_SIZE (mode);
1565    }
1566
1567  return n_insns;
1568}
1569
1570/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1571   with move instructions for mode MODE.  GENFUN is the gen_... function
1572   to make a move insn for that mode.  DATA has all the other info.  */
1573
1574static void
1575move_by_pieces_1 (genfun, mode, data)
1576     rtx (*genfun) PROTO ((rtx, ...));
1577     enum machine_mode mode;
1578     struct move_by_pieces *data;
1579{
1580  register int size = GET_MODE_SIZE (mode);
1581  register rtx to1, from1;
1582
1583  while (data->len >= size)
1584    {
1585      if (data->reverse) data->offset -= size;
1586
1587      to1 = (data->autinc_to
1588	     ? gen_rtx_MEM (mode, data->to_addr)
1589	     : copy_rtx (change_address (data->to, mode,
1590					 plus_constant (data->to_addr,
1591							data->offset))));
1592      MEM_IN_STRUCT_P (to1) = data->to_struct;
1593
1594      from1
1595	= (data->autinc_from
1596	   ? gen_rtx_MEM (mode, data->from_addr)
1597	   : copy_rtx (change_address (data->from, mode,
1598				       plus_constant (data->from_addr,
1599						      data->offset))));
1600      MEM_IN_STRUCT_P (from1) = data->from_struct;
1601
1602      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1603	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1604      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1605	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1606
1607      emit_insn ((*genfun) (to1, from1));
1608      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1609	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1610      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1611	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1612
1613      if (! data->reverse) data->offset += size;
1614
1615      data->len -= size;
1616    }
1617}
1618
1619/* Emit code to move a block Y to a block X.
1620   This may be done with string-move instructions,
1621   with multiple scalar move instructions, or with a library call.
1622
1623   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1624   with mode BLKmode.
1625   SIZE is an rtx that says how long they are.
1626   ALIGN is the maximum alignment we can assume they have,
1627   measured in bytes.
1628
1629   Return the address of the new block, if memcpy is called and returns it,
1630   0 otherwise.  */
1631
1632rtx
1633emit_block_move (x, y, size, align)
1634     rtx x, y;
1635     rtx size;
1636     int align;
1637{
1638  rtx retval = 0;
1639#ifdef TARGET_MEM_FUNCTIONS
1640  static tree fn;
1641  tree call_expr, arg_list;
1642#endif
1643
1644  if (GET_MODE (x) != BLKmode)
1645    abort ();
1646
1647  if (GET_MODE (y) != BLKmode)
1648    abort ();
1649
1650  x = protect_from_queue (x, 1);
1651  y = protect_from_queue (y, 0);
1652  size = protect_from_queue (size, 0);
1653
1654  if (GET_CODE (x) != MEM)
1655    abort ();
1656  if (GET_CODE (y) != MEM)
1657    abort ();
1658  if (size == 0)
1659    abort ();
1660
1661  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1662    move_by_pieces (x, y, INTVAL (size), align);
1663  else
1664    {
1665      /* Try the most limited insn first, because there's no point
1666	 including more than one in the machine description unless
1667	 the more limited one has some advantage.  */
1668
1669      rtx opalign = GEN_INT (align);
1670      enum machine_mode mode;
1671
1672      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1673	   mode = GET_MODE_WIDER_MODE (mode))
1674	{
1675	  enum insn_code code = movstr_optab[(int) mode];
1676
1677	  if (code != CODE_FOR_nothing
1678	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1679		 here because if SIZE is less than the mode mask, as it is
1680		 returned by the macro, it will definitely be less than the
1681		 actual mode mask.  */
1682	      && ((GET_CODE (size) == CONST_INT
1683		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1684		       <= (GET_MODE_MASK (mode) >> 1)))
1685		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1686	      && (insn_operand_predicate[(int) code][0] == 0
1687		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1688	      && (insn_operand_predicate[(int) code][1] == 0
1689		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1690	      && (insn_operand_predicate[(int) code][3] == 0
1691		  || (*insn_operand_predicate[(int) code][3]) (opalign,
1692							       VOIDmode)))
1693	    {
1694	      rtx op2;
1695	      rtx last = get_last_insn ();
1696	      rtx pat;
1697
1698	      op2 = convert_to_mode (mode, size, 1);
1699	      if (insn_operand_predicate[(int) code][2] != 0
1700		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1701		op2 = copy_to_mode_reg (mode, op2);
1702
1703	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1704	      if (pat)
1705		{
1706		  emit_insn (pat);
1707		  return 0;
1708		}
1709	      else
1710		delete_insns_since (last);
1711	    }
1712	}
1713
1714      /* X, Y, or SIZE may have been passed through protect_from_queue.
1715
1716	 It is unsafe to save the value generated by protect_from_queue
1717	 and reuse it later.  Consider what happens if emit_queue is
1718	 called before the return value from protect_from_queue is used.
1719
1720	 Expansion of the CALL_EXPR below will call emit_queue before
1721	 we are finished emitting RTL for argument setup.  So if we are
1722	 not careful we could get the wrong value for an argument.
1723
1724	 To avoid this problem we go ahead and emit code to copy X, Y &
1725	 SIZE into new pseudos.  We can then place those new pseudos
1726	 into an RTL_EXPR and use them later, even after a call to
1727	 emit_queue.
1728
1729	 Note this is not strictly needed for library calls since they
1730	 do not call emit_queue before loading their arguments.  However,
1731	 we may need to have library calls call emit_queue in the future
1732	 since failing to do so could cause problems for targets which
1733	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
1734      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1735      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1736
1737#ifdef TARGET_MEM_FUNCTIONS
1738      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1739#else
1740      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1741			      TREE_UNSIGNED (integer_type_node));
1742      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1743#endif
1744
1745#ifdef TARGET_MEM_FUNCTIONS
1746      /* It is incorrect to use the libcall calling conventions to call
1747	 memcpy in this context.
1748
1749	 This could be a user call to memcpy and the user may wish to
1750	 examine the return value from memcpy.
1751
1752	 For targets where libcalls and normal calls have different conventions
1753	 for returning pointers, we could end up generating incorrect code.
1754
1755	 So instead of using a libcall sequence we build up a suitable
1756	 CALL_EXPR and expand the call in the normal fashion.  */
1757      if (fn == NULL_TREE)
1758	{
1759	  tree fntype;
1760
1761	  /* This was copied from except.c, I don't know if all this is
1762	     necessary in this context or not.  */
1763	  fn = get_identifier ("memcpy");
1764	  push_obstacks_nochange ();
1765	  end_temporary_allocation ();
1766	  fntype = build_pointer_type (void_type_node);
1767	  fntype = build_function_type (fntype, NULL_TREE);
1768	  fn = build_decl (FUNCTION_DECL, fn, fntype);
1769	  DECL_EXTERNAL (fn) = 1;
1770	  TREE_PUBLIC (fn) = 1;
1771	  DECL_ARTIFICIAL (fn) = 1;
1772	  make_decl_rtl (fn, NULL_PTR, 1);
1773	  assemble_external (fn);
1774	  pop_obstacks ();
1775	}
1776
1777      /* We need to make an argument list for the function call.
1778
1779	 memcpy has three arguments, the first two are void * addresses and
1780	 the last is a size_t byte count for the copy.  */
1781      arg_list
1782	= build_tree_list (NULL_TREE,
1783			   make_tree (build_pointer_type (void_type_node), x));
1784      TREE_CHAIN (arg_list)
1785	= build_tree_list (NULL_TREE,
1786			   make_tree (build_pointer_type (void_type_node), y));
1787      TREE_CHAIN (TREE_CHAIN (arg_list))
1788	 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1789      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1790
1791      /* Now we have to build up the CALL_EXPR itself.  */
1792      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1793      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1794			 call_expr, arg_list, NULL_TREE);
1795      TREE_SIDE_EFFECTS (call_expr) = 1;
1796
1797      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1798#else
1799      emit_library_call (bcopy_libfunc, 0,
1800			 VOIDmode, 3, y, Pmode, x, Pmode,
1801			 convert_to_mode (TYPE_MODE (integer_type_node), size,
1802					  TREE_UNSIGNED (integer_type_node)),
1803			 TYPE_MODE (integer_type_node));
1804#endif
1805    }
1806
1807  return retval;
1808}
1809
1810/* Copy all or part of a value X into registers starting at REGNO.
1811   The number of registers to be filled is NREGS.  */
1812
1813void
1814move_block_to_reg (regno, x, nregs, mode)
1815     int regno;
1816     rtx x;
1817     int nregs;
1818     enum machine_mode mode;
1819{
1820  int i;
1821#ifdef HAVE_load_multiple
1822  rtx pat;
1823  rtx last;
1824#endif
1825
1826  if (nregs == 0)
1827    return;
1828
1829  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1830    x = validize_mem (force_const_mem (mode, x));
1831
1832  /* See if the machine can do this with a load multiple insn.  */
1833#ifdef HAVE_load_multiple
1834  if (HAVE_load_multiple)
1835    {
1836      last = get_last_insn ();
1837      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1838			       GEN_INT (nregs));
1839      if (pat)
1840	{
1841	  emit_insn (pat);
1842	  return;
1843	}
1844      else
1845	delete_insns_since (last);
1846    }
1847#endif
1848
1849  for (i = 0; i < nregs; i++)
1850    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1851		    operand_subword_force (x, i, mode));
1852}
1853
1854/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1855   The number of registers to be filled is NREGS.  SIZE indicates the number
1856   of bytes in the object X.  */
1857
1858
1859void
1860move_block_from_reg (regno, x, nregs, size)
1861     int regno;
1862     rtx x;
1863     int nregs;
1864     int size;
1865{
1866  int i;
1867#ifdef HAVE_store_multiple
1868  rtx pat;
1869  rtx last;
1870#endif
1871  enum machine_mode mode;
1872
1873  /* If SIZE is that of a mode no bigger than a word, just use that
1874     mode's store operation.  */
1875  if (size <= UNITS_PER_WORD
1876      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1877    {
1878      emit_move_insn (change_address (x, mode, NULL),
1879		      gen_rtx_REG (mode, regno));
1880      return;
1881    }
1882
1883  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884     to the left before storing to memory.  Note that the previous test
1885     doesn't handle all cases (e.g. SIZE == 3).  */
1886  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1887    {
1888      rtx tem = operand_subword (x, 0, 1, BLKmode);
1889      rtx shift;
1890
1891      if (tem == 0)
1892	abort ();
1893
1894      shift = expand_shift (LSHIFT_EXPR, word_mode,
1895			    gen_rtx_REG (word_mode, regno),
1896			    build_int_2 ((UNITS_PER_WORD - size)
1897					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898      emit_move_insn (tem, shift);
1899      return;
1900    }
1901
1902  /* See if the machine can do this with a store multiple insn.  */
1903#ifdef HAVE_store_multiple
1904  if (HAVE_store_multiple)
1905    {
1906      last = get_last_insn ();
1907      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1908				GEN_INT (nregs));
1909      if (pat)
1910	{
1911	  emit_insn (pat);
1912	  return;
1913	}
1914      else
1915	delete_insns_since (last);
1916    }
1917#endif
1918
1919  for (i = 0; i < nregs; i++)
1920    {
1921      rtx tem = operand_subword (x, i, 1, BLKmode);
1922
1923      if (tem == 0)
1924	abort ();
1925
1926      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1927    }
1928}
1929
1930/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931   registers represented by a PARALLEL.  SSIZE represents the total size of
1932   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
1933   SRC in bits.  */
1934/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1935   the balance will be in what would be the low-order memory addresses, i.e.
1936   left justified for big endian, right justified for little endian.  This
1937   happens to be true for the targets currently using this support.  If this
1938   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1939   would be needed.  */
1940
1941void
1942emit_group_load (dst, orig_src, ssize, align)
1943     rtx dst, orig_src;
1944     int align, ssize;
1945{
1946  rtx *tmps, src;
1947  int start, i;
1948
1949  if (GET_CODE (dst) != PARALLEL)
1950    abort ();
1951
1952  /* Check for a NULL entry, used to indicate that the parameter goes
1953     both on the stack and in registers.  */
1954  if (XEXP (XVECEXP (dst, 0, 0), 0))
1955    start = 0;
1956  else
1957    start = 1;
1958
1959  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1960
1961  /* If we won't be loading directly from memory, protect the real source
1962     from strange tricks we might play.  */
1963  src = orig_src;
1964  if (GET_CODE (src) != MEM)
1965    {
1966      src = gen_reg_rtx (GET_MODE (orig_src));
1967      emit_move_insn (src, orig_src);
1968    }
1969
1970  /* Process the pieces.  */
1971  for (i = start; i < XVECLEN (dst, 0); i++)
1972    {
1973      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1974      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1975      int bytelen = GET_MODE_SIZE (mode);
1976      int shift = 0;
1977
1978      /* Handle trailing fragments that run over the size of the struct.  */
1979      if (ssize >= 0 && bytepos + bytelen > ssize)
1980	{
1981	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1982	  bytelen = ssize - bytepos;
1983	  if (bytelen <= 0)
1984	    abort();
1985	}
1986
1987      /* Optimize the access just a bit.  */
1988      if (GET_CODE (src) == MEM
1989	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1990	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1991	  && bytelen == GET_MODE_SIZE (mode))
1992	{
1993	  tmps[i] = gen_reg_rtx (mode);
1994	  emit_move_insn (tmps[i],
1995			  change_address (src, mode,
1996					  plus_constant (XEXP (src, 0),
1997							 bytepos)));
1998	}
1999      else
2000	{
2001	  tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
2002				       bytepos*BITS_PER_UNIT, 1, NULL_RTX,
2003				       mode, mode, align, ssize);
2004	}
2005
2006      if (BYTES_BIG_ENDIAN && shift)
2007	{
2008	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2009			tmps[i], 0, OPTAB_WIDEN);
2010	}
2011    }
2012  emit_queue();
2013
2014  /* Copy the extracted pieces into the proper (probable) hard regs.  */
2015  for (i = start; i < XVECLEN (dst, 0); i++)
2016    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2017}
2018
2019/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2020   registers represented by a PARALLEL.  SSIZE represents the total size of
2021   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */
2022
2023void
2024emit_group_store (orig_dst, src, ssize, align)
2025     rtx orig_dst, src;
2026     int ssize, align;
2027{
2028  rtx *tmps, dst;
2029  int start, i;
2030
2031  if (GET_CODE (src) != PARALLEL)
2032    abort ();
2033
2034  /* Check for a NULL entry, used to indicate that the parameter goes
2035     both on the stack and in registers.  */
2036  if (XEXP (XVECEXP (src, 0, 0), 0))
2037    start = 0;
2038  else
2039    start = 1;
2040
2041  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2042
2043  /* Copy the (probable) hard regs into pseudos.  */
2044  for (i = start; i < XVECLEN (src, 0); i++)
2045    {
2046      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2047      tmps[i] = gen_reg_rtx (GET_MODE (reg));
2048      emit_move_insn (tmps[i], reg);
2049    }
2050  emit_queue();
2051
2052  /* If we won't be storing directly into memory, protect the real destination
2053     from strange tricks we might play.  */
2054  dst = orig_dst;
2055  if (GET_CODE (dst) == PARALLEL)
2056    {
2057      rtx temp;
2058
2059      /* We can get a PARALLEL dst if there is a conditional expression in
2060	 a return statement.  In that case, the dst and src are the same,
2061	 so no action is necessary.  */
2062      if (rtx_equal_p (dst, src))
2063	return;
2064
2065      /* It is unclear if we can ever reach here, but we may as well handle
2066	 it.  Allocate a temporary, and split this into a store/load to/from
2067	 the temporary.  */
2068
2069      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2070      emit_group_store (temp, src, ssize, align);
2071      emit_group_load (dst, temp, ssize, align);
2072      return;
2073    }
2074  else if (GET_CODE (dst) != MEM)
2075    {
2076      dst = gen_reg_rtx (GET_MODE (orig_dst));
2077      /* Make life a bit easier for combine.  */
2078      emit_move_insn (dst, const0_rtx);
2079    }
2080  else if (! MEM_IN_STRUCT_P (dst))
2081    {
2082      /* store_bit_field requires that memory operations have
2083	 mem_in_struct_p set; we might not.  */
2084
2085      dst = copy_rtx (orig_dst);
2086      MEM_SET_IN_STRUCT_P (dst, 1);
2087    }
2088
2089  /* Process the pieces.  */
2090  for (i = start; i < XVECLEN (src, 0); i++)
2091    {
2092      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2093      enum machine_mode mode = GET_MODE (tmps[i]);
2094      int bytelen = GET_MODE_SIZE (mode);
2095
2096      /* Handle trailing fragments that run over the size of the struct.  */
2097      if (ssize >= 0 && bytepos + bytelen > ssize)
2098	{
2099	  if (BYTES_BIG_ENDIAN)
2100	    {
2101	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2102	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2103			    tmps[i], 0, OPTAB_WIDEN);
2104	    }
2105	  bytelen = ssize - bytepos;
2106	}
2107
2108      /* Optimize the access just a bit.  */
2109      if (GET_CODE (dst) == MEM
2110	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2111	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2112	  && bytelen == GET_MODE_SIZE (mode))
2113	{
2114	  emit_move_insn (change_address (dst, mode,
2115					  plus_constant (XEXP (dst, 0),
2116							 bytepos)),
2117			  tmps[i]);
2118	}
2119      else
2120	{
2121	  store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2122			   mode, tmps[i], align, ssize);
2123	}
2124    }
2125  emit_queue();
2126
2127  /* Copy from the pseudo into the (probable) hard reg.  */
2128  if (GET_CODE (dst) == REG)
2129    emit_move_insn (orig_dst, dst);
2130}
2131
2132/* Generate code to copy a BLKmode object of TYPE out of a
2133   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2134   is null, a stack temporary is created.  TGTBLK is returned.
2135
2136   The primary purpose of this routine is to handle functions
2137   that return BLKmode structures in registers.  Some machines
2138   (the PA for example) want to return all small structures
2139   in registers regardless of the structure's alignment.
2140  */
2141
2142rtx
2143copy_blkmode_from_reg(tgtblk,srcreg,type)
2144     rtx tgtblk;
2145     rtx srcreg;
2146     tree type;
2147{
2148      int bytes = int_size_in_bytes (type);
2149      rtx src = NULL, dst = NULL;
2150      int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2151      int bitpos, xbitpos, big_endian_correction = 0;
2152
2153      if (tgtblk == 0)
2154	{
2155	  tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2156	  MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2157	  preserve_temp_slots (tgtblk);
2158	}
2159
2160      /* This code assumes srcreg is at least a full word.  If it isn't,
2161	 copy it into a new pseudo which is a full word.  */
2162      if (GET_MODE (srcreg) != BLKmode
2163	  && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2164	srcreg = convert_to_mode (word_mode, srcreg,
2165				  TREE_UNSIGNED (type));
2166
2167      /* Structures whose size is not a multiple of a word are aligned
2168	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2169	 machine, this means we must skip the empty high order bytes when
2170	 calculating the bit offset.  */
2171      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2172	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2173						  * BITS_PER_UNIT));
2174
2175      /* Copy the structure BITSIZE bites at a time.
2176
2177	 We could probably emit more efficient code for machines
2178	 which do not use strict alignment, but it doesn't seem
2179	 worth the effort at the current time.  */
2180      for (bitpos = 0, xbitpos = big_endian_correction;
2181	   bitpos < bytes * BITS_PER_UNIT;
2182	   bitpos += bitsize, xbitpos += bitsize)
2183	{
2184
2185	  /* We need a new source operand each time xbitpos is on a
2186	     word boundary and when xbitpos == big_endian_correction
2187	     (the first time through).  */
2188	  if (xbitpos % BITS_PER_WORD == 0
2189	      || xbitpos == big_endian_correction)
2190	    src = operand_subword_force (srcreg,
2191					 xbitpos / BITS_PER_WORD,
2192					 BLKmode);
2193
2194	  /* We need a new destination operand each time bitpos is on
2195	     a word boundary.  */
2196	  if (bitpos % BITS_PER_WORD == 0)
2197	    dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2198
2199	  /* Use xbitpos for the source extraction (right justified) and
2200	     xbitpos for the destination store (left justified).  */
2201	  store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2202			   extract_bit_field (src, bitsize,
2203					      xbitpos % BITS_PER_WORD, 1,
2204					      NULL_RTX, word_mode,
2205					      word_mode,
2206					      bitsize / BITS_PER_UNIT,
2207					      BITS_PER_WORD),
2208			   bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2209	}
2210      return tgtblk;
2211}
2212
2213
2214/* Add a USE expression for REG to the (possibly empty) list pointed
2215   to by CALL_FUSAGE.  REG must denote a hard register.  */
2216
2217void
2218use_reg (call_fusage, reg)
2219     rtx *call_fusage, reg;
2220{
2221  if (GET_CODE (reg) != REG
2222      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2223    abort();
2224
2225  *call_fusage
2226    = gen_rtx_EXPR_LIST (VOIDmode,
2227			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2228}
2229
2230/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2231   starting at REGNO.  All of these registers must be hard registers.  */
2232
2233void
2234use_regs (call_fusage, regno, nregs)
2235     rtx *call_fusage;
2236     int regno;
2237     int nregs;
2238{
2239  int i;
2240
2241  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2242    abort ();
2243
2244  for (i = 0; i < nregs; i++)
2245    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2246}
2247
2248/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2249   PARALLEL REGS.  This is for calls that pass values in multiple
2250   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2251
2252void
2253use_group_regs (call_fusage, regs)
2254     rtx *call_fusage;
2255     rtx regs;
2256{
2257  int i;
2258
2259  for (i = 0; i < XVECLEN (regs, 0); i++)
2260    {
2261      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2262
2263      /* A NULL entry means the parameter goes both on the stack and in
2264	 registers.  This can also be a MEM for targets that pass values
2265	 partially on the stack and partially in registers.  */
2266      if (reg != 0 && GET_CODE (reg) == REG)
2267	use_reg (call_fusage, reg);
2268    }
2269}
2270
2271/* Generate several move instructions to clear LEN bytes of block TO.
2272   (A MEM rtx with BLKmode).   The caller must pass TO through
2273   protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2274   we can assume.  */
2275
2276static void
2277clear_by_pieces (to, len, align)
2278     rtx to;
2279     int len, align;
2280{
2281  struct clear_by_pieces data;
2282  rtx to_addr = XEXP (to, 0);
2283  int max_size = MOVE_MAX_PIECES + 1;
2284  enum machine_mode mode = VOIDmode, tmode;
2285  enum insn_code icode;
2286
2287  data.offset = 0;
2288  data.to_addr = to_addr;
2289  data.to = to;
2290  data.autinc_to
2291    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2292       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2293
2294  data.explicit_inc_to = 0;
2295  data.reverse
2296    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2297  if (data.reverse) data.offset = len;
2298  data.len = len;
2299
2300  data.to_struct = MEM_IN_STRUCT_P (to);
2301
2302  /* If copying requires more than two move insns,
2303     copy addresses to registers (to make displacements shorter)
2304     and use post-increment if available.  */
2305  if (!data.autinc_to
2306      && move_by_pieces_ninsns (len, align) > 2)
2307    {
2308      /* Determine the main mode we'll be using */
2309      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2310	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2311	if (GET_MODE_SIZE (tmode) < max_size)
2312	  mode = tmode;
2313
2314      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2315	{
2316	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2317	  data.autinc_to = 1;
2318	  data.explicit_inc_to = -1;
2319	}
2320      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2321	{
2322	  data.to_addr = copy_addr_to_reg (to_addr);
2323	  data.autinc_to = 1;
2324	  data.explicit_inc_to = 1;
2325	}
2326      if (!data.autinc_to && CONSTANT_P (to_addr))
2327	data.to_addr = copy_addr_to_reg (to_addr);
2328    }
2329
2330  if (! SLOW_UNALIGNED_ACCESS
2331      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2332    align = MOVE_MAX;
2333
2334  /* First move what we can in the largest integer mode, then go to
2335     successively smaller modes.  */
2336
2337  while (max_size > 1)
2338    {
2339      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341	if (GET_MODE_SIZE (tmode) < max_size)
2342	  mode = tmode;
2343
2344      if (mode == VOIDmode)
2345	break;
2346
2347      icode = mov_optab->handlers[(int) mode].insn_code;
2348      if (icode != CODE_FOR_nothing
2349	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2350			   GET_MODE_SIZE (mode)))
2351	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2352
2353      max_size = GET_MODE_SIZE (mode);
2354    }
2355
2356  /* The code above should have handled everything.  */
2357  if (data.len != 0)
2358    abort ();
2359}
2360
2361/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
2362   with move instructions for mode MODE.  GENFUN is the gen_... function
2363   to make a move insn for that mode.  DATA has all the other info.  */
2364
2365static void
2366clear_by_pieces_1 (genfun, mode, data)
2367     rtx (*genfun) PROTO ((rtx, ...));
2368     enum machine_mode mode;
2369     struct clear_by_pieces *data;
2370{
2371  register int size = GET_MODE_SIZE (mode);
2372  register rtx to1;
2373
2374  while (data->len >= size)
2375    {
2376      if (data->reverse) data->offset -= size;
2377
2378      to1 = (data->autinc_to
2379	     ? gen_rtx_MEM (mode, data->to_addr)
2380	     : copy_rtx (change_address (data->to, mode,
2381					 plus_constant (data->to_addr,
2382							data->offset))));
2383      MEM_IN_STRUCT_P (to1) = data->to_struct;
2384
2385      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2386	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2387
2388      emit_insn ((*genfun) (to1, const0_rtx));
2389      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2390	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2391
2392      if (! data->reverse) data->offset += size;
2393
2394      data->len -= size;
2395    }
2396}
2397
2398/* Write zeros through the storage of OBJECT.
2399   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2400   the maximum alignment we can is has, measured in bytes.
2401
2402   If we call a function that returns the length of the block, return it.  */
2403
2404rtx
2405clear_storage (object, size, align)
2406     rtx object;
2407     rtx size;
2408     int align;
2409{
2410#ifdef TARGET_MEM_FUNCTIONS
2411  static tree fn;
2412  tree call_expr, arg_list;
2413#endif
2414  rtx retval = 0;
2415
2416  if (GET_MODE (object) == BLKmode)
2417    {
2418      object = protect_from_queue (object, 1);
2419      size = protect_from_queue (size, 0);
2420
2421      if (GET_CODE (size) == CONST_INT
2422	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2423	clear_by_pieces (object, INTVAL (size), align);
2424
2425      else
2426	{
2427	  /* Try the most limited insn first, because there's no point
2428	     including more than one in the machine description unless
2429	     the more limited one has some advantage.  */
2430
2431	  rtx opalign = GEN_INT (align);
2432	  enum machine_mode mode;
2433
2434	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2435	       mode = GET_MODE_WIDER_MODE (mode))
2436	    {
2437	      enum insn_code code = clrstr_optab[(int) mode];
2438
2439	      if (code != CODE_FOR_nothing
2440		  /* We don't need MODE to be narrower than
2441		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2442		     the mode mask, as it is returned by the macro, it will
2443		     definitely be less than the actual mode mask.  */
2444		  && ((GET_CODE (size) == CONST_INT
2445		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2446			   <= (GET_MODE_MASK (mode) >> 1)))
2447		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2448		  && (insn_operand_predicate[(int) code][0] == 0
2449		      || (*insn_operand_predicate[(int) code][0]) (object,
2450								   BLKmode))
2451		  && (insn_operand_predicate[(int) code][2] == 0
2452		      || (*insn_operand_predicate[(int) code][2]) (opalign,
2453								   VOIDmode)))
2454		{
2455		  rtx op1;
2456		  rtx last = get_last_insn ();
2457		  rtx pat;
2458
2459		  op1 = convert_to_mode (mode, size, 1);
2460		  if (insn_operand_predicate[(int) code][1] != 0
2461		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
2462								     mode))
2463		    op1 = copy_to_mode_reg (mode, op1);
2464
2465		  pat = GEN_FCN ((int) code) (object, op1, opalign);
2466		  if (pat)
2467		    {
2468		      emit_insn (pat);
2469		      return 0;
2470		    }
2471		  else
2472		    delete_insns_since (last);
2473		}
2474	    }
2475
2476	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2477
2478	     It is unsafe to save the value generated by protect_from_queue
2479	     and reuse it later.  Consider what happens if emit_queue is
2480	     called before the return value from protect_from_queue is used.
2481
2482	     Expansion of the CALL_EXPR below will call emit_queue before
2483	     we are finished emitting RTL for argument setup.  So if we are
2484	     not careful we could get the wrong value for an argument.
2485
2486	     To avoid this problem we go ahead and emit code to copy OBJECT
2487	     and SIZE into new pseudos.  We can then place those new pseudos
2488	     into an RTL_EXPR and use them later, even after a call to
2489	     emit_queue.
2490
2491	     Note this is not strictly needed for library calls since they
2492	     do not call emit_queue before loading their arguments.  However,
2493	     we may need to have library calls call emit_queue in the future
2494	     since failing to do so could cause problems for targets which
2495	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
2496	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2497
2498#ifdef TARGET_MEM_FUNCTIONS
2499	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2500#else
2501	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2502				  TREE_UNSIGNED (integer_type_node));
2503	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2504#endif
2505
2506
2507#ifdef TARGET_MEM_FUNCTIONS
2508	  /* It is incorrect to use the libcall calling conventions to call
2509	     memset in this context.
2510
2511	     This could be a user call to memset and the user may wish to
2512	     examine the return value from memset.
2513
2514	     For targets where libcalls and normal calls have different
2515	     conventions for returning pointers, we could end up generating
2516	      incorrect code.
2517
2518	     So instead of using a libcall sequence we build up a suitable
2519	     CALL_EXPR and expand the call in the normal fashion.  */
2520	  if (fn == NULL_TREE)
2521	    {
2522	      tree fntype;
2523
2524	      /* This was copied from except.c, I don't know if all this is
2525		 necessary in this context or not.  */
2526	      fn = get_identifier ("memset");
2527	      push_obstacks_nochange ();
2528	      end_temporary_allocation ();
2529	      fntype = build_pointer_type (void_type_node);
2530	      fntype = build_function_type (fntype, NULL_TREE);
2531	      fn = build_decl (FUNCTION_DECL, fn, fntype);
2532	      DECL_EXTERNAL (fn) = 1;
2533	      TREE_PUBLIC (fn) = 1;
2534	      DECL_ARTIFICIAL (fn) = 1;
2535	      make_decl_rtl (fn, NULL_PTR, 1);
2536	      assemble_external (fn);
2537	      pop_obstacks ();
2538	    }
2539
2540	  /* We need to make an argument list for the function call.
2541
2542	     memset has three arguments, the first is a void * addresses, the
2543	     second a integer with the initialization value, the last is a
2544	     size_t byte count for the copy.  */
2545	  arg_list
2546	    = build_tree_list (NULL_TREE,
2547			       make_tree (build_pointer_type (void_type_node),
2548					  object));
2549	  TREE_CHAIN (arg_list)
2550	    = build_tree_list (NULL_TREE,
2551			        make_tree (integer_type_node, const0_rtx));
2552	  TREE_CHAIN (TREE_CHAIN (arg_list))
2553	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2554	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2555
2556	  /* Now we have to build up the CALL_EXPR itself.  */
2557	  call_expr = build1 (ADDR_EXPR,
2558			      build_pointer_type (TREE_TYPE (fn)), fn);
2559	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2560			     call_expr, arg_list, NULL_TREE);
2561	  TREE_SIDE_EFFECTS (call_expr) = 1;
2562
2563	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2564#else
2565	  emit_library_call (bzero_libfunc, 0,
2566			     VOIDmode, 2, object, Pmode, size,
2567			     TYPE_MODE (integer_type_node));
2568#endif
2569	}
2570    }
2571  else
2572    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2573
2574  return retval;
2575}
2576
2577/* Generate code to copy Y into X.
2578   Both Y and X must have the same mode, except that
2579   Y can be a constant with VOIDmode.
2580   This mode cannot be BLKmode; use emit_block_move for that.
2581
2582   Return the last instruction emitted.  */
2583
2584rtx
2585emit_move_insn (x, y)
2586     rtx x, y;
2587{
2588  enum machine_mode mode = GET_MODE (x);
2589
2590  x = protect_from_queue (x, 1);
2591  y = protect_from_queue (y, 0);
2592
2593  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2594    abort ();
2595
2596  /* Never force constant_p_rtx to memory.  */
2597  if (GET_CODE (y) == CONSTANT_P_RTX)
2598    ;
2599  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2600    y = force_const_mem (mode, y);
2601
2602  /* If X or Y are memory references, verify that their addresses are valid
2603     for the machine.  */
2604  if (GET_CODE (x) == MEM
2605      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2606	   && ! push_operand (x, GET_MODE (x)))
2607	  || (flag_force_addr
2608	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2609    x = change_address (x, VOIDmode, XEXP (x, 0));
2610
2611  if (GET_CODE (y) == MEM
2612      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2613	  || (flag_force_addr
2614	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2615    y = change_address (y, VOIDmode, XEXP (y, 0));
2616
2617  if (mode == BLKmode)
2618    abort ();
2619
2620  return emit_move_insn_1 (x, y);
2621}
2622
2623/* Low level part of emit_move_insn.
2624   Called just like emit_move_insn, but assumes X and Y
2625   are basically valid.  */
2626
2627rtx
2628emit_move_insn_1 (x, y)
2629     rtx x, y;
2630{
2631  enum machine_mode mode = GET_MODE (x);
2632  enum machine_mode submode;
2633  enum mode_class class = GET_MODE_CLASS (mode);
2634  int i;
2635
2636  if (mode >= MAX_MACHINE_MODE)
2637      abort ();
2638
2639  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2640    return
2641      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2642
2643  /* Expand complex moves by moving real part and imag part, if possible.  */
2644  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2645	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2646						    * BITS_PER_UNIT),
2647						   (class == MODE_COMPLEX_INT
2648						    ? MODE_INT : MODE_FLOAT),
2649						   0))
2650	   && (mov_optab->handlers[(int) submode].insn_code
2651	       != CODE_FOR_nothing))
2652    {
2653      /* Don't split destination if it is a stack push.  */
2654      int stack = push_operand (x, GET_MODE (x));
2655
2656      /* If this is a stack, push the highpart first, so it
2657	 will be in the argument order.
2658
2659	 In that case, change_address is used only to convert
2660	 the mode, not to change the address.  */
2661      if (stack)
2662	{
2663	  /* Note that the real part always precedes the imag part in memory
2664	     regardless of machine's endianness.  */
2665#ifdef STACK_GROWS_DOWNWARD
2666	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2667		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2668		      gen_imagpart (submode, y)));
2669	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2670		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2671		      gen_realpart (submode, y)));
2672#else
2673	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2674		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2675		      gen_realpart (submode, y)));
2676	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2677		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2678		      gen_imagpart (submode, y)));
2679#endif
2680	}
2681      else
2682	{
2683	  rtx realpart_x, realpart_y;
2684	  rtx imagpart_x, imagpart_y;
2685
2686	  /* If this is a complex value with each part being smaller than a
2687	     word, the usual calling sequence will likely pack the pieces into
2688	     a single register.  Unfortunately, SUBREG of hard registers only
2689	     deals in terms of words, so we have a problem converting input
2690	     arguments to the CONCAT of two registers that is used elsewhere
2691	     for complex values.  If this is before reload, we can copy it into
2692	     memory and reload.  FIXME, we should see about using extract and
2693	     insert on integer registers, but complex short and complex char
2694	     variables should be rarely used.  */
2695	  if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2696	      && (reload_in_progress | reload_completed) == 0)
2697	    {
2698	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2699	      int packed_src_p  = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2700
2701	      if (packed_dest_p || packed_src_p)
2702		{
2703		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2704					       ? MODE_FLOAT : MODE_INT);
2705
2706		  enum machine_mode reg_mode =
2707		    mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2708
2709		  if (reg_mode != BLKmode)
2710		    {
2711		      rtx mem = assign_stack_temp (reg_mode,
2712						   GET_MODE_SIZE (mode), 0);
2713
2714		      rtx cmem = change_address (mem, mode, NULL_RTX);
2715
2716		      current_function_cannot_inline
2717			= "function using short complex types cannot be inline";
2718
2719		      if (packed_dest_p)
2720			{
2721			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2722			  emit_move_insn_1 (cmem, y);
2723			  return emit_move_insn_1 (sreg, mem);
2724			}
2725		      else
2726			{
2727			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2728			  emit_move_insn_1 (mem, sreg);
2729			  return emit_move_insn_1 (x, cmem);
2730			}
2731		    }
2732		}
2733	    }
2734
2735	  realpart_x = gen_realpart (submode, x);
2736	  realpart_y = gen_realpart (submode, y);
2737	  imagpart_x = gen_imagpart (submode, x);
2738	  imagpart_y = gen_imagpart (submode, y);
2739
2740	  /* Show the output dies here.  This is necessary for SUBREGs
2741	     of pseudos since we cannot track their lifetimes correctly;
2742	     hard regs shouldn't appear here except as return values.
2743	     We never want to emit such a clobber after reload.  */
2744	  if (x != y
2745	      && ! (reload_in_progress || reload_completed)
2746	      && (GET_CODE (realpart_x) == SUBREG
2747		  || GET_CODE (imagpart_x) == SUBREG))
2748	    {
2749	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2750	    }
2751
2752	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2753		     (realpart_x, realpart_y));
2754	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2755		     (imagpart_x, imagpart_y));
2756	}
2757
2758      return get_last_insn ();
2759    }
2760
2761  /* This will handle any multi-word mode that lacks a move_insn pattern.
2762     However, you will get better code if you define such patterns,
2763     even if they must turn into multiple assembler instructions.  */
2764  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2765    {
2766      rtx last_insn = 0;
2767      rtx seq;
2768      int need_clobber;
2769
2770#ifdef PUSH_ROUNDING
2771
2772      /* If X is a push on the stack, do the push now and replace
2773	 X with a reference to the stack pointer.  */
2774      if (push_operand (x, GET_MODE (x)))
2775	{
2776	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2777	  x = change_address (x, VOIDmode, stack_pointer_rtx);
2778	}
2779#endif
2780
2781      start_sequence ();
2782
2783      need_clobber = 0;
2784      for (i = 0;
2785	   i < (GET_MODE_SIZE (mode)  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2786	   i++)
2787	{
2788	  rtx xpart = operand_subword (x, i, 1, mode);
2789	  rtx ypart = operand_subword (y, i, 1, mode);
2790
2791	  /* If we can't get a part of Y, put Y into memory if it is a
2792	     constant.  Otherwise, force it into a register.  If we still
2793	     can't get a part of Y, abort.  */
2794	  if (ypart == 0 && CONSTANT_P (y))
2795	    {
2796	      y = force_const_mem (mode, y);
2797	      ypart = operand_subword (y, i, 1, mode);
2798	    }
2799	  else if (ypart == 0)
2800	    ypart = operand_subword_force (y, i, mode);
2801
2802	  if (xpart == 0 || ypart == 0)
2803	    abort ();
2804
2805	  need_clobber |= (GET_CODE (xpart) == SUBREG);
2806
2807	  last_insn = emit_move_insn (xpart, ypart);
2808	}
2809
2810      seq = gen_sequence ();
2811      end_sequence ();
2812
2813      /* Show the output dies here.  This is necessary for SUBREGs
2814	 of pseudos since we cannot track their lifetimes correctly;
2815	 hard regs shouldn't appear here except as return values.
2816	 We never want to emit such a clobber after reload.  */
2817      if (x != y
2818	  && ! (reload_in_progress || reload_completed)
2819	  && need_clobber != 0)
2820	{
2821	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2822	}
2823
2824      emit_insn (seq);
2825
2826      return last_insn;
2827    }
2828  else
2829    abort ();
2830}
2831
2832/* Pushing data onto the stack.  */
2833
2834/* Push a block of length SIZE (perhaps variable)
2835   and return an rtx to address the beginning of the block.
2836   Note that it is not possible for the value returned to be a QUEUED.
2837   The value may be virtual_outgoing_args_rtx.
2838
2839   EXTRA is the number of bytes of padding to push in addition to SIZE.
2840   BELOW nonzero means this padding comes at low addresses;
2841   otherwise, the padding comes at high addresses.  */
2842
2843rtx
2844push_block (size, extra, below)
2845     rtx size;
2846     int extra, below;
2847{
2848  register rtx temp;
2849
2850  size = convert_modes (Pmode, ptr_mode, size, 1);
2851  if (CONSTANT_P (size))
2852    anti_adjust_stack (plus_constant (size, extra));
2853  else if (GET_CODE (size) == REG && extra == 0)
2854    anti_adjust_stack (size);
2855  else
2856    {
2857      rtx temp = copy_to_mode_reg (Pmode, size);
2858      if (extra != 0)
2859	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2860			     temp, 0, OPTAB_LIB_WIDEN);
2861      anti_adjust_stack (temp);
2862    }
2863
2864#if defined (STACK_GROWS_DOWNWARD) \
2865    || (defined (ARGS_GROW_DOWNWARD) \
2866	&& !defined (ACCUMULATE_OUTGOING_ARGS))
2867
2868  /* Return the lowest stack address when STACK or ARGS grow downward and
2869     we are not aaccumulating outgoing arguments (the c4x port uses such
2870     conventions).  */
2871  temp = virtual_outgoing_args_rtx;
2872  if (extra != 0 && below)
2873    temp = plus_constant (temp, extra);
2874#else
2875  if (GET_CODE (size) == CONST_INT)
2876    temp = plus_constant (virtual_outgoing_args_rtx,
2877			  - INTVAL (size) - (below ? 0 : extra));
2878  else if (extra != 0 && !below)
2879    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2880		    negate_rtx (Pmode, plus_constant (size, extra)));
2881  else
2882    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2883		    negate_rtx (Pmode, size));
2884#endif
2885
2886  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2887}
2888
2889rtx
2890gen_push_operand ()
2891{
2892  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2893}
2894
2895/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2896   block of SIZE bytes.  */
2897
2898static rtx
2899get_push_address (size)
2900	int size;
2901{
2902  register rtx temp;
2903
2904  if (STACK_PUSH_CODE == POST_DEC)
2905    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2906  else if (STACK_PUSH_CODE == POST_INC)
2907    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2908  else
2909    temp = stack_pointer_rtx;
2910
2911  return copy_to_reg (temp);
2912}
2913
2914/* Generate code to push X onto the stack, assuming it has mode MODE and
2915   type TYPE.
2916   MODE is redundant except when X is a CONST_INT (since they don't
2917   carry mode info).
2918   SIZE is an rtx for the size of data to be copied (in bytes),
2919   needed only if X is BLKmode.
2920
2921   ALIGN (in bytes) is maximum alignment we can assume.
2922
2923   If PARTIAL and REG are both nonzero, then copy that many of the first
2924   words of X into registers starting with REG, and push the rest of X.
2925   The amount of space pushed is decreased by PARTIAL words,
2926   rounded *down* to a multiple of PARM_BOUNDARY.
2927   REG must be a hard register in this case.
2928   If REG is zero but PARTIAL is not, take any all others actions for an
2929   argument partially in registers, but do not actually load any
2930   registers.
2931
2932   EXTRA is the amount in bytes of extra space to leave next to this arg.
2933   This is ignored if an argument block has already been allocated.
2934
2935   On a machine that lacks real push insns, ARGS_ADDR is the address of
2936   the bottom of the argument block for this call.  We use indexing off there
2937   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
2938   argument block has not been preallocated.
2939
2940   ARGS_SO_FAR is the size of args previously pushed for this call.
2941
2942   REG_PARM_STACK_SPACE is nonzero if functions require stack space
2943   for arguments passed in registers.  If nonzero, it will be the number
2944   of bytes required.  */
2945
2946void
2947emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2948		args_addr, args_so_far, reg_parm_stack_space)
2949     register rtx x;
2950     enum machine_mode mode;
2951     tree type;
2952     rtx size;
2953     int align;
2954     int partial;
2955     rtx reg;
2956     int extra;
2957     rtx args_addr;
2958     rtx args_so_far;
2959     int reg_parm_stack_space;
2960{
2961  rtx xinner;
2962  enum direction stack_direction
2963#ifdef STACK_GROWS_DOWNWARD
2964    = downward;
2965#else
2966    = upward;
2967#endif
2968
2969  /* Decide where to pad the argument: `downward' for below,
2970     `upward' for above, or `none' for don't pad it.
2971     Default is below for small data on big-endian machines; else above.  */
2972  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2973
2974  /* Invert direction if stack is post-update.  */
2975  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2976    if (where_pad != none)
2977      where_pad = (where_pad == downward ? upward : downward);
2978
2979  xinner = x = protect_from_queue (x, 0);
2980
2981  if (mode == BLKmode)
2982    {
2983      /* Copy a block into the stack, entirely or partially.  */
2984
2985      register rtx temp;
2986      int used = partial * UNITS_PER_WORD;
2987      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2988      int skip;
2989
2990      if (size == 0)
2991	abort ();
2992
2993      used -= offset;
2994
2995      /* USED is now the # of bytes we need not copy to the stack
2996	 because registers will take care of them.  */
2997
2998      if (partial != 0)
2999	xinner = change_address (xinner, BLKmode,
3000				 plus_constant (XEXP (xinner, 0), used));
3001
3002      /* If the partial register-part of the arg counts in its stack size,
3003	 skip the part of stack space corresponding to the registers.
3004	 Otherwise, start copying to the beginning of the stack space,
3005	 by setting SKIP to 0.  */
3006      skip = (reg_parm_stack_space == 0) ? 0 : used;
3007
3008#ifdef PUSH_ROUNDING
3009      /* Do it with several push insns if that doesn't take lots of insns
3010	 and if there is no difficulty with push insns that skip bytes
3011	 on the stack for alignment purposes.  */
3012      if (args_addr == 0
3013	  && GET_CODE (size) == CONST_INT
3014	  && skip == 0
3015	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3016	  /* Here we avoid the case of a structure whose weak alignment
3017	     forces many pushes of a small amount of data,
3018	     and such small pushes do rounding that causes trouble.  */
3019	  && ((! SLOW_UNALIGNED_ACCESS)
3020	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
3021	      || PUSH_ROUNDING (align) == align)
3022	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3023	{
3024	  /* Push padding now if padding above and stack grows down,
3025	     or if padding below and stack grows up.
3026	     But if space already allocated, this has already been done.  */
3027	  if (extra && args_addr == 0
3028	      && where_pad != none && where_pad != stack_direction)
3029	    anti_adjust_stack (GEN_INT (extra));
3030
3031	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3032			  INTVAL (size) - used, align);
3033
3034	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3035	    {
3036	      rtx temp;
3037
3038	      in_check_memory_usage = 1;
3039	      temp = get_push_address (INTVAL(size) - used);
3040	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3041		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3042				   temp, Pmode,
3043				   XEXP (xinner, 0), Pmode,
3044				   GEN_INT (INTVAL(size) - used),
3045				   TYPE_MODE (sizetype));
3046	      else
3047		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3048				   temp, Pmode,
3049			 	   GEN_INT (INTVAL(size) - used),
3050				   TYPE_MODE (sizetype),
3051				   GEN_INT (MEMORY_USE_RW),
3052				   TYPE_MODE (integer_type_node));
3053	      in_check_memory_usage = 0;
3054	    }
3055	}
3056      else
3057#endif /* PUSH_ROUNDING */
3058	{
3059	  /* Otherwise make space on the stack and copy the data
3060	     to the address of that space.  */
3061
3062	  /* Deduct words put into registers from the size we must copy.  */
3063	  if (partial != 0)
3064	    {
3065	      if (GET_CODE (size) == CONST_INT)
3066		size = GEN_INT (INTVAL (size) - used);
3067	      else
3068		size = expand_binop (GET_MODE (size), sub_optab, size,
3069				     GEN_INT (used), NULL_RTX, 0,
3070				     OPTAB_LIB_WIDEN);
3071	    }
3072
3073	  /* Get the address of the stack space.
3074	     In this case, we do not deal with EXTRA separately.
3075	     A single stack adjust will do.  */
3076	  if (! args_addr)
3077	    {
3078	      temp = push_block (size, extra, where_pad == downward);
3079	      extra = 0;
3080	    }
3081	  else if (GET_CODE (args_so_far) == CONST_INT)
3082	    temp = memory_address (BLKmode,
3083				   plus_constant (args_addr,
3084						  skip + INTVAL (args_so_far)));
3085	  else
3086	    temp = memory_address (BLKmode,
3087				   plus_constant (gen_rtx_PLUS (Pmode,
3088								args_addr,
3089								args_so_far),
3090						  skip));
3091	  if (current_function_check_memory_usage && ! in_check_memory_usage)
3092	    {
3093	      rtx target;
3094
3095	      in_check_memory_usage = 1;
3096	      target = copy_to_reg (temp);
3097	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3098		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3099				   target, Pmode,
3100				   XEXP (xinner, 0), Pmode,
3101				   size, TYPE_MODE (sizetype));
3102	      else
3103	        emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3104				   target, Pmode,
3105			 	   size, TYPE_MODE (sizetype),
3106				   GEN_INT (MEMORY_USE_RW),
3107				   TYPE_MODE (integer_type_node));
3108	      in_check_memory_usage = 0;
3109	    }
3110
3111	  /* TEMP is the address of the block.  Copy the data there.  */
3112	  if (GET_CODE (size) == CONST_INT
3113	      && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3114	    {
3115	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3116			      INTVAL (size), align);
3117	      goto ret;
3118	    }
3119	  else
3120	    {
3121	      rtx opalign = GEN_INT (align);
3122	      enum machine_mode mode;
3123	      rtx target = gen_rtx_MEM (BLKmode, temp);
3124
3125	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3126		   mode != VOIDmode;
3127		   mode = GET_MODE_WIDER_MODE (mode))
3128		{
3129		  enum insn_code code = movstr_optab[(int) mode];
3130
3131		  if (code != CODE_FOR_nothing
3132		      && ((GET_CODE (size) == CONST_INT
3133			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
3134			       <= (GET_MODE_MASK (mode) >> 1)))
3135			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3136		      && (insn_operand_predicate[(int) code][0] == 0
3137			  || ((*insn_operand_predicate[(int) code][0])
3138			      (target, BLKmode)))
3139		      && (insn_operand_predicate[(int) code][1] == 0
3140			  || ((*insn_operand_predicate[(int) code][1])
3141			      (xinner, BLKmode)))
3142		      && (insn_operand_predicate[(int) code][3] == 0
3143			  || ((*insn_operand_predicate[(int) code][3])
3144			      (opalign, VOIDmode))))
3145		    {
3146		      rtx op2 = convert_to_mode (mode, size, 1);
3147		      rtx last = get_last_insn ();
3148		      rtx pat;
3149
3150		      if (insn_operand_predicate[(int) code][2] != 0
3151			  && ! ((*insn_operand_predicate[(int) code][2])
3152				(op2, mode)))
3153			op2 = copy_to_mode_reg (mode, op2);
3154
3155		      pat = GEN_FCN ((int) code) (target, xinner,
3156						  op2, opalign);
3157		      if (pat)
3158			{
3159			  emit_insn (pat);
3160			  goto ret;
3161			}
3162		      else
3163			delete_insns_since (last);
3164		    }
3165		}
3166	    }
3167
3168#ifndef ACCUMULATE_OUTGOING_ARGS
3169	  /* If the source is referenced relative to the stack pointer,
3170	     copy it to another register to stabilize it.  We do not need
3171	     to do this if we know that we won't be changing sp.  */
3172
3173	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3174	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3175	    temp = copy_to_reg (temp);
3176#endif
3177
3178	  /* Make inhibit_defer_pop nonzero around the library call
3179	     to force it to pop the bcopy-arguments right away.  */
3180	  NO_DEFER_POP;
3181#ifdef TARGET_MEM_FUNCTIONS
3182	  emit_library_call (memcpy_libfunc, 0,
3183			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3184			     convert_to_mode (TYPE_MODE (sizetype),
3185					      size, TREE_UNSIGNED (sizetype)),
3186			     TYPE_MODE (sizetype));
3187#else
3188	  emit_library_call (bcopy_libfunc, 0,
3189			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3190			     convert_to_mode (TYPE_MODE (integer_type_node),
3191					      size,
3192					      TREE_UNSIGNED (integer_type_node)),
3193			     TYPE_MODE (integer_type_node));
3194#endif
3195	  OK_DEFER_POP;
3196	}
3197    }
3198  else if (partial > 0)
3199    {
3200      /* Scalar partly in registers.  */
3201
3202      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3203      int i;
3204      int not_stack;
3205      /* # words of start of argument
3206	 that we must make space for but need not store.  */
3207      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3208      int args_offset = INTVAL (args_so_far);
3209      int skip;
3210
3211      /* Push padding now if padding above and stack grows down,
3212	 or if padding below and stack grows up.
3213	 But if space already allocated, this has already been done.  */
3214      if (extra && args_addr == 0
3215	  && where_pad != none && where_pad != stack_direction)
3216	anti_adjust_stack (GEN_INT (extra));
3217
3218      /* If we make space by pushing it, we might as well push
3219	 the real data.  Otherwise, we can leave OFFSET nonzero
3220	 and leave the space uninitialized.  */
3221      if (args_addr == 0)
3222	offset = 0;
3223
3224      /* Now NOT_STACK gets the number of words that we don't need to
3225	 allocate on the stack.  */
3226      not_stack = partial - offset;
3227
3228      /* If the partial register-part of the arg counts in its stack size,
3229	 skip the part of stack space corresponding to the registers.
3230	 Otherwise, start copying to the beginning of the stack space,
3231	 by setting SKIP to 0.  */
3232      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3233
3234      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3235	x = validize_mem (force_const_mem (mode, x));
3236
3237      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3238	 SUBREGs of such registers are not allowed.  */
3239      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3240	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3241	x = copy_to_reg (x);
3242
3243      /* Loop over all the words allocated on the stack for this arg.  */
3244      /* We can do it by words, because any scalar bigger than a word
3245	 has a size a multiple of a word.  */
3246#ifndef PUSH_ARGS_REVERSED
3247      for (i = not_stack; i < size; i++)
3248#else
3249      for (i = size - 1; i >= not_stack; i--)
3250#endif
3251	if (i >= not_stack + offset)
3252	  emit_push_insn (operand_subword_force (x, i, mode),
3253			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3254			  0, args_addr,
3255			  GEN_INT (args_offset + ((i - not_stack + skip)
3256						  * UNITS_PER_WORD)),
3257			  reg_parm_stack_space);
3258    }
3259  else
3260    {
3261      rtx addr;
3262      rtx target = NULL_RTX;
3263
3264      /* Push padding now if padding above and stack grows down,
3265	 or if padding below and stack grows up.
3266	 But if space already allocated, this has already been done.  */
3267      if (extra && args_addr == 0
3268	  && where_pad != none && where_pad != stack_direction)
3269	anti_adjust_stack (GEN_INT (extra));
3270
3271#ifdef PUSH_ROUNDING
3272      if (args_addr == 0)
3273	addr = gen_push_operand ();
3274      else
3275#endif
3276	{
3277	  if (GET_CODE (args_so_far) == CONST_INT)
3278	    addr
3279	      = memory_address (mode,
3280				plus_constant (args_addr,
3281					       INTVAL (args_so_far)));
3282          else
3283	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3284						       args_so_far));
3285	  target = addr;
3286	}
3287
3288      emit_move_insn (gen_rtx_MEM (mode, addr), x);
3289
3290      if (current_function_check_memory_usage && ! in_check_memory_usage)
3291	{
3292	  in_check_memory_usage = 1;
3293	  if (target == 0)
3294	    target = get_push_address (GET_MODE_SIZE (mode));
3295
3296	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3297	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3298			       target, Pmode,
3299			       XEXP (x, 0), Pmode,
3300			       GEN_INT (GET_MODE_SIZE (mode)),
3301			       TYPE_MODE (sizetype));
3302	  else
3303	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3304			       target, Pmode,
3305			       GEN_INT (GET_MODE_SIZE (mode)),
3306			       TYPE_MODE (sizetype),
3307			       GEN_INT (MEMORY_USE_RW),
3308			       TYPE_MODE (integer_type_node));
3309	  in_check_memory_usage = 0;
3310	}
3311    }
3312
3313 ret:
3314  /* If part should go in registers, copy that part
3315     into the appropriate registers.  Do this now, at the end,
3316     since mem-to-mem copies above may do function calls.  */
3317  if (partial > 0 && reg != 0)
3318    {
3319      /* Handle calls that pass values in multiple non-contiguous locations.
3320	 The Irix 6 ABI has examples of this.  */
3321      if (GET_CODE (reg) == PARALLEL)
3322	emit_group_load (reg, x, -1, align);  /* ??? size? */
3323      else
3324	move_block_to_reg (REGNO (reg), x, partial, mode);
3325    }
3326
3327  if (extra && args_addr == 0 && where_pad == stack_direction)
3328    anti_adjust_stack (GEN_INT (extra));
3329}
3330
3331/* Expand an assignment that stores the value of FROM into TO.
3332   If WANT_VALUE is nonzero, return an rtx for the value of TO.
3333   (This may contain a QUEUED rtx;
3334   if the value is constant, this rtx is a constant.)
3335   Otherwise, the returned value is NULL_RTX.
3336
3337   SUGGEST_REG is no longer actually used.
3338   It used to mean, copy the value through a register
3339   and return that register, if that is possible.
3340   We now use WANT_VALUE to decide whether to do this.  */
3341
3342rtx
3343expand_assignment (to, from, want_value, suggest_reg)
3344     tree to, from;
3345     int want_value;
3346     int suggest_reg;
3347{
3348  register rtx to_rtx = 0;
3349  rtx result;
3350
3351  /* Don't crash if the lhs of the assignment was erroneous.  */
3352
3353  if (TREE_CODE (to) == ERROR_MARK)
3354    {
3355      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3356      return want_value ? result : NULL_RTX;
3357    }
3358
3359  /* Assignment of a structure component needs special treatment
3360     if the structure component's rtx is not simply a MEM.
3361     Assignment of an array element at a constant index, and assignment of
3362     an array element in an unaligned packed structure field, has the same
3363     problem.  */
3364
3365  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3366      || TREE_CODE (to) == ARRAY_REF)
3367    {
3368      enum machine_mode mode1;
3369      int bitsize;
3370      int bitpos;
3371      tree offset;
3372      int unsignedp;
3373      int volatilep = 0;
3374      tree tem;
3375      int alignment;
3376
3377      push_temp_slots ();
3378      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3379				 &unsignedp, &volatilep, &alignment);
3380
3381      /* If we are going to use store_bit_field and extract_bit_field,
3382	 make sure to_rtx will be safe for multiple use.  */
3383
3384      if (mode1 == VOIDmode && want_value)
3385	tem = stabilize_reference (tem);
3386
3387      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3388      if (offset != 0)
3389	{
3390	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3391
3392	  if (GET_CODE (to_rtx) != MEM)
3393	    abort ();
3394
3395	  if (GET_MODE (offset_rtx) != ptr_mode)
3396	    {
3397#ifdef POINTERS_EXTEND_UNSIGNED
3398	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3399#else
3400	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3401#endif
3402	    }
3403
3404	  /* A constant address in TO_RTX can have VOIDmode, we must not try
3405	     to call force_reg for that case.  Avoid that case.  */
3406	  if (GET_CODE (to_rtx) == MEM
3407	      && GET_MODE (to_rtx) == BLKmode
3408	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3409	      && bitsize
3410	      && (bitpos % bitsize) == 0
3411	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3412	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3413	    {
3414	      rtx temp = change_address (to_rtx, mode1,
3415				         plus_constant (XEXP (to_rtx, 0),
3416						        (bitpos /
3417						         BITS_PER_UNIT)));
3418	      if (GET_CODE (XEXP (temp, 0)) == REG)
3419	        to_rtx = temp;
3420	      else
3421		to_rtx = change_address (to_rtx, mode1,
3422				         force_reg (GET_MODE (XEXP (temp, 0)),
3423						    XEXP (temp, 0)));
3424	      bitpos = 0;
3425	    }
3426
3427	  to_rtx = change_address (to_rtx, VOIDmode,
3428				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3429						 force_reg (ptr_mode, offset_rtx)));
3430	}
3431      if (volatilep)
3432	{
3433	  if (GET_CODE (to_rtx) == MEM)
3434	    {
3435	      /* When the offset is zero, to_rtx is the address of the
3436		 structure we are storing into, and hence may be shared.
3437		 We must make a new MEM before setting the volatile bit.  */
3438	      if (offset == 0)
3439		to_rtx = copy_rtx (to_rtx);
3440
3441	      MEM_VOLATILE_P (to_rtx) = 1;
3442	    }
3443#if 0  /* This was turned off because, when a field is volatile
3444	  in an object which is not volatile, the object may be in a register,
3445	  and then we would abort over here.  */
3446	  else
3447	    abort ();
3448#endif
3449	}
3450
3451      if (TREE_CODE (to) == COMPONENT_REF
3452	  && TREE_READONLY (TREE_OPERAND (to, 1)))
3453	{
3454	  if (offset == 0)
3455	    to_rtx = copy_rtx (to_rtx);
3456
3457	  RTX_UNCHANGING_P (to_rtx) = 1;
3458	}
3459
3460      /* Check the access.  */
3461      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3462	{
3463	  rtx to_addr;
3464	  int size;
3465	  int best_mode_size;
3466	  enum machine_mode best_mode;
3467
3468	  best_mode = get_best_mode (bitsize, bitpos,
3469	  			     TYPE_ALIGN (TREE_TYPE (tem)),
3470	  			     mode1, volatilep);
3471	  if (best_mode == VOIDmode)
3472	    best_mode = QImode;
3473
3474	  best_mode_size = GET_MODE_BITSIZE (best_mode);
3475	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3476	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3477	  size *= GET_MODE_SIZE (best_mode);
3478
3479	  /* Check the access right of the pointer.  */
3480	  if (size)
3481	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3482			       to_addr, Pmode,
3483			       GEN_INT (size), TYPE_MODE (sizetype),
3484			       GEN_INT (MEMORY_USE_WO),
3485			       TYPE_MODE (integer_type_node));
3486	}
3487
3488      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3489			    (want_value
3490			     /* Spurious cast makes HPUX compiler happy.  */
3491			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3492			     : VOIDmode),
3493			    unsignedp,
3494			    /* Required alignment of containing datum.  */
3495			    alignment,
3496			    int_size_in_bytes (TREE_TYPE (tem)),
3497			    get_alias_set (to));
3498      preserve_temp_slots (result);
3499      free_temp_slots ();
3500      pop_temp_slots ();
3501
3502      /* If the value is meaningful, convert RESULT to the proper mode.
3503	 Otherwise, return nothing.  */
3504      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3505					  TYPE_MODE (TREE_TYPE (from)),
3506					  result,
3507					  TREE_UNSIGNED (TREE_TYPE (to)))
3508	      : NULL_RTX);
3509    }
3510
3511  /* If the rhs is a function call and its value is not an aggregate,
3512     call the function before we start to compute the lhs.
3513     This is needed for correct code for cases such as
3514     val = setjmp (buf) on machines where reference to val
3515     requires loading up part of an address in a separate insn.
3516
3517     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3518     a promoted variable where the zero- or sign- extension needs to be done.
3519     Handling this in the normal way is safe because no computation is done
3520     before the call.  */
3521  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3522      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3523      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3524    {
3525      rtx value;
3526
3527      push_temp_slots ();
3528      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3529      if (to_rtx == 0)
3530	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3531
3532      /* Handle calls that return values in multiple non-contiguous locations.
3533	 The Irix 6 ABI has examples of this.  */
3534      if (GET_CODE (to_rtx) == PARALLEL)
3535	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3536			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3537      else if (GET_MODE (to_rtx) == BLKmode)
3538	emit_block_move (to_rtx, value, expr_size (from),
3539			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3540      else
3541	{
3542#ifdef POINTERS_EXTEND_UNSIGNED
3543	  if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3544	     || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3545	    value = convert_memory_address (GET_MODE (to_rtx), value);
3546#endif
3547	  emit_move_insn (to_rtx, value);
3548	}
3549      preserve_temp_slots (to_rtx);
3550      free_temp_slots ();
3551      pop_temp_slots ();
3552      return want_value ? to_rtx : NULL_RTX;
3553    }
3554
3555  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
3556     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
3557
3558  if (to_rtx == 0)
3559    {
3560      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3561      if (GET_CODE (to_rtx) == MEM)
3562	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3563    }
3564
3565  /* Don't move directly into a return register.  */
3566  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3567    {
3568      rtx temp;
3569
3570      push_temp_slots ();
3571      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3572      emit_move_insn (to_rtx, temp);
3573      preserve_temp_slots (to_rtx);
3574      free_temp_slots ();
3575      pop_temp_slots ();
3576      return want_value ? to_rtx : NULL_RTX;
3577    }
3578
3579  /* In case we are returning the contents of an object which overlaps
3580     the place the value is being stored, use a safe function when copying
3581     a value through a pointer into a structure value return block.  */
3582  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3583      && current_function_returns_struct
3584      && !current_function_returns_pcc_struct)
3585    {
3586      rtx from_rtx, size;
3587
3588      push_temp_slots ();
3589      size = expr_size (from);
3590      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3591			      EXPAND_MEMORY_USE_DONT);
3592
3593      /* Copy the rights of the bitmap.  */
3594      if (current_function_check_memory_usage)
3595	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3596			   XEXP (to_rtx, 0), Pmode,
3597			   XEXP (from_rtx, 0), Pmode,
3598			   convert_to_mode (TYPE_MODE (sizetype),
3599					    size, TREE_UNSIGNED (sizetype)),
3600			   TYPE_MODE (sizetype));
3601
3602#ifdef TARGET_MEM_FUNCTIONS
3603      emit_library_call (memcpy_libfunc, 0,
3604			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3605			 XEXP (from_rtx, 0), Pmode,
3606			 convert_to_mode (TYPE_MODE (sizetype),
3607					  size, TREE_UNSIGNED (sizetype)),
3608			 TYPE_MODE (sizetype));
3609#else
3610      emit_library_call (bcopy_libfunc, 0,
3611			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3612			 XEXP (to_rtx, 0), Pmode,
3613			 convert_to_mode (TYPE_MODE (integer_type_node),
3614					  size, TREE_UNSIGNED (integer_type_node)),
3615			 TYPE_MODE (integer_type_node));
3616#endif
3617
3618      preserve_temp_slots (to_rtx);
3619      free_temp_slots ();
3620      pop_temp_slots ();
3621      return want_value ? to_rtx : NULL_RTX;
3622    }
3623
3624  /* Compute FROM and store the value in the rtx we got.  */
3625
3626  push_temp_slots ();
3627  result = store_expr (from, to_rtx, want_value);
3628  preserve_temp_slots (result);
3629  free_temp_slots ();
3630  pop_temp_slots ();
3631  return want_value ? result : NULL_RTX;
3632}
3633
3634/* Generate code for computing expression EXP,
3635   and storing the value into TARGET.
3636   TARGET may contain a QUEUED rtx.
3637
3638   If WANT_VALUE is nonzero, return a copy of the value
3639   not in TARGET, so that we can be sure to use the proper
3640   value in a containing expression even if TARGET has something
3641   else stored in it.  If possible, we copy the value through a pseudo
3642   and return that pseudo.  Or, if the value is constant, we try to
3643   return the constant.  In some cases, we return a pseudo
3644   copied *from* TARGET.
3645
3646   If the mode is BLKmode then we may return TARGET itself.
3647   It turns out that in BLKmode it doesn't cause a problem.
3648   because C has no operators that could combine two different
3649   assignments into the same BLKmode object with different values
3650   with no sequence point.  Will other languages need this to
3651   be more thorough?
3652
3653   If WANT_VALUE is 0, we return NULL, to make sure
3654   to catch quickly any cases where the caller uses the value
3655   and fails to set WANT_VALUE.  */
3656
3657rtx
3658store_expr (exp, target, want_value)
3659     register tree exp;
3660     register rtx target;
3661     int want_value;
3662{
3663  register rtx temp;
3664  int dont_return_target = 0;
3665
3666  if (TREE_CODE (exp) == COMPOUND_EXPR)
3667    {
3668      /* Perform first part of compound expression, then assign from second
3669	 part.  */
3670      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3671      emit_queue ();
3672      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3673    }
3674  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3675    {
3676      /* For conditional expression, get safe form of the target.  Then
3677	 test the condition, doing the appropriate assignment on either
3678	 side.  This avoids the creation of unnecessary temporaries.
3679	 For non-BLKmode, it is more efficient not to do this.  */
3680
3681      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3682
3683      emit_queue ();
3684      target = protect_from_queue (target, 1);
3685
3686      do_pending_stack_adjust ();
3687      NO_DEFER_POP;
3688      jumpifnot (TREE_OPERAND (exp, 0), lab1);
3689      start_cleanup_deferral ();
3690      store_expr (TREE_OPERAND (exp, 1), target, 0);
3691      end_cleanup_deferral ();
3692      emit_queue ();
3693      emit_jump_insn (gen_jump (lab2));
3694      emit_barrier ();
3695      emit_label (lab1);
3696      start_cleanup_deferral ();
3697      store_expr (TREE_OPERAND (exp, 2), target, 0);
3698      end_cleanup_deferral ();
3699      emit_queue ();
3700      emit_label (lab2);
3701      OK_DEFER_POP;
3702
3703      return want_value ? target : NULL_RTX;
3704    }
3705  else if (queued_subexp_p (target))
3706    /* If target contains a postincrement, let's not risk
3707       using it as the place to generate the rhs.  */
3708    {
3709      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3710	{
3711	  /* Expand EXP into a new pseudo.  */
3712	  temp = gen_reg_rtx (GET_MODE (target));
3713	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
3714	}
3715      else
3716	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3717
3718      /* If target is volatile, ANSI requires accessing the value
3719	 *from* the target, if it is accessed.  So make that happen.
3720	 In no case return the target itself.  */
3721      if (! MEM_VOLATILE_P (target) && want_value)
3722	dont_return_target = 1;
3723    }
3724  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3725	   && GET_MODE (target) != BLKmode)
3726    /* If target is in memory and caller wants value in a register instead,
3727       arrange that.  Pass TARGET as target for expand_expr so that,
3728       if EXP is another assignment, WANT_VALUE will be nonzero for it.
3729       We know expand_expr will not use the target in that case.
3730       Don't do this if TARGET is volatile because we are supposed
3731       to write it and then read it.  */
3732    {
3733      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3734			  GET_MODE (target), 0);
3735      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3736	temp = copy_to_reg (temp);
3737      dont_return_target = 1;
3738    }
3739  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3740    /* If this is an scalar in a register that is stored in a wider mode
3741       than the declared mode, compute the result into its declared mode
3742       and then convert to the wider mode.  Our value is the computed
3743       expression.  */
3744    {
3745      /* If we don't want a value, we can do the conversion inside EXP,
3746	 which will often result in some optimizations.  Do the conversion
3747	 in two steps: first change the signedness, if needed, then
3748	 the extend.  But don't do this if the type of EXP is a subtype
3749	 of something else since then the conversion might involve
3750	 more than just converting modes.  */
3751      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3752	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
3753	{
3754	  if (TREE_UNSIGNED (TREE_TYPE (exp))
3755	      != SUBREG_PROMOTED_UNSIGNED_P (target))
3756	    exp
3757	      = convert
3758		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3759					  TREE_TYPE (exp)),
3760		 exp);
3761
3762	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3763					SUBREG_PROMOTED_UNSIGNED_P (target)),
3764			 exp);
3765	}
3766
3767      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3768
3769      /* If TEMP is a volatile MEM and we want a result value, make
3770	 the access now so it gets done only once.  Likewise if
3771	 it contains TARGET.  */
3772      if (GET_CODE (temp) == MEM && want_value
3773	  && (MEM_VOLATILE_P (temp)
3774	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3775	temp = copy_to_reg (temp);
3776
3777      /* If TEMP is a VOIDmode constant, use convert_modes to make
3778	 sure that we properly convert it.  */
3779      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3780	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3781			      TYPE_MODE (TREE_TYPE (exp)), temp,
3782			      SUBREG_PROMOTED_UNSIGNED_P (target));
3783
3784      convert_move (SUBREG_REG (target), temp,
3785		    SUBREG_PROMOTED_UNSIGNED_P (target));
3786      return want_value ? temp : NULL_RTX;
3787    }
3788  else
3789    {
3790      temp = expand_expr (exp, target, GET_MODE (target), 0);
3791      /* Return TARGET if it's a specified hardware register.
3792	 If TARGET is a volatile mem ref, either return TARGET
3793	 or return a reg copied *from* TARGET; ANSI requires this.
3794
3795	 Otherwise, if TEMP is not TARGET, return TEMP
3796	 if it is constant (for efficiency),
3797	 or if we really want the correct value.  */
3798      if (!(target && GET_CODE (target) == REG
3799	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
3800	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3801	  && ! rtx_equal_p (temp, target)
3802	  && (CONSTANT_P (temp) || want_value))
3803	dont_return_target = 1;
3804    }
3805
3806  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3807     the same as that of TARGET, adjust the constant.  This is needed, for
3808     example, in case it is a CONST_DOUBLE and we want only a word-sized
3809     value.  */
3810  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3811      && TREE_CODE (exp) != ERROR_MARK
3812      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3813    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3814			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3815
3816  if (current_function_check_memory_usage
3817      && GET_CODE (target) == MEM
3818      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3819    {
3820      if (GET_CODE (temp) == MEM)
3821        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3822			   XEXP (target, 0), Pmode,
3823			   XEXP (temp, 0), Pmode,
3824			   expr_size (exp), TYPE_MODE (sizetype));
3825      else
3826        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3827			   XEXP (target, 0), Pmode,
3828			   expr_size (exp), TYPE_MODE (sizetype),
3829			   GEN_INT (MEMORY_USE_WO),
3830			   TYPE_MODE (integer_type_node));
3831    }
3832
3833  /* If value was not generated in the target, store it there.
3834     Convert the value to TARGET's type first if nec.  */
3835  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3836     one or both of them are volatile memory refs, we have to distinguish
3837     two cases:
3838     - expand_expr has used TARGET.  In this case, we must not generate
3839       another copy.  This can be detected by TARGET being equal according
3840       to == .
3841     - expand_expr has not used TARGET - that means that the source just
3842       happens to have the same RTX form.  Since temp will have been created
3843       by expand_expr, it will compare unequal according to == .
3844       We must generate a copy in this case, to reach the correct number
3845       of volatile memory references.  */
3846
3847  if ((! rtx_equal_p (temp, target)
3848       || (temp != target && (side_effects_p (temp)
3849			      || side_effects_p (target))))
3850      && TREE_CODE (exp) != ERROR_MARK)
3851    {
3852      target = protect_from_queue (target, 1);
3853      if (GET_MODE (temp) != GET_MODE (target)
3854	  && GET_MODE (temp) != VOIDmode)
3855	{
3856	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3857	  if (dont_return_target)
3858	    {
3859	      /* In this case, we will return TEMP,
3860		 so make sure it has the proper mode.
3861		 But don't forget to store the value into TARGET.  */
3862	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3863	      emit_move_insn (target, temp);
3864	    }
3865	  else
3866	    convert_move (target, temp, unsignedp);
3867	}
3868
3869      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3870	{
3871	  /* Handle copying a string constant into an array.
3872	     The string constant may be shorter than the array.
3873	     So copy just the string's actual length, and clear the rest.  */
3874	  rtx size;
3875	  rtx addr;
3876
3877	  /* Get the size of the data type of the string,
3878	     which is actually the size of the target.  */
3879	  size = expr_size (exp);
3880	  if (GET_CODE (size) == CONST_INT
3881	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
3882	    emit_block_move (target, temp, size,
3883			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3884	  else
3885	    {
3886	      /* Compute the size of the data to copy from the string.  */
3887	      tree copy_size
3888		= size_binop (MIN_EXPR,
3889			      make_tree (sizetype, size),
3890			      convert (sizetype,
3891				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3892	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3893					       VOIDmode, 0);
3894	      rtx label = 0;
3895
3896	      /* Copy that much.  */
3897	      emit_block_move (target, temp, copy_size_rtx,
3898			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3899
3900	      /* Figure out how much is left in TARGET that we have to clear.
3901		 Do all calculations in ptr_mode.  */
3902
3903	      addr = XEXP (target, 0);
3904	      addr = convert_modes (ptr_mode, Pmode, addr, 1);
3905
3906	      if (GET_CODE (copy_size_rtx) == CONST_INT)
3907		{
3908		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3909		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3910		}
3911	      else
3912		{
3913		  addr = force_reg (ptr_mode, addr);
3914		  addr = expand_binop (ptr_mode, add_optab, addr,
3915				       copy_size_rtx, NULL_RTX, 0,
3916				       OPTAB_LIB_WIDEN);
3917
3918		  size = expand_binop (ptr_mode, sub_optab, size,
3919				       copy_size_rtx, NULL_RTX, 0,
3920				       OPTAB_LIB_WIDEN);
3921
3922		  label = gen_label_rtx ();
3923		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3924					   GET_MODE (size), 0, 0, label);
3925		}
3926
3927	      if (size != const0_rtx)
3928		{
3929		  /* Be sure we can write on ADDR.  */
3930		  if (current_function_check_memory_usage)
3931		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3932				       addr, Pmode,
3933				       size, TYPE_MODE (sizetype),
3934 				       GEN_INT (MEMORY_USE_WO),
3935				       TYPE_MODE (integer_type_node));
3936#ifdef TARGET_MEM_FUNCTIONS
3937		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3938				     addr, ptr_mode,
3939				     const0_rtx, TYPE_MODE (integer_type_node),
3940				     convert_to_mode (TYPE_MODE (sizetype),
3941						      size,
3942						      TREE_UNSIGNED (sizetype)),
3943				     TYPE_MODE (sizetype));
3944#else
3945		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3946				     addr, ptr_mode,
3947				     convert_to_mode (TYPE_MODE (integer_type_node),
3948						      size,
3949						      TREE_UNSIGNED (integer_type_node)),
3950				     TYPE_MODE (integer_type_node));
3951#endif
3952		}
3953
3954	      if (label)
3955		emit_label (label);
3956	    }
3957	}
3958      /* Handle calls that return values in multiple non-contiguous locations.
3959	 The Irix 6 ABI has examples of this.  */
3960      else if (GET_CODE (target) == PARALLEL)
3961	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3962			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3963      else if (GET_MODE (temp) == BLKmode)
3964	emit_block_move (target, temp, expr_size (exp),
3965			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3966      else
3967	emit_move_insn (target, temp);
3968    }
3969
3970  /* If we don't want a value, return NULL_RTX.  */
3971  if (! want_value)
3972    return NULL_RTX;
3973
3974  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3975     ??? The latter test doesn't seem to make sense.  */
3976  else if (dont_return_target && GET_CODE (temp) != MEM)
3977    return temp;
3978
3979  /* Return TARGET itself if it is a hard register.  */
3980  else if (want_value && GET_MODE (target) != BLKmode
3981	   && ! (GET_CODE (target) == REG
3982		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3983    return copy_to_reg (target);
3984
3985  else
3986    return target;
3987}
3988
3989/* Return 1 if EXP just contains zeros.  */
3990
3991static int
3992is_zeros_p (exp)
3993     tree exp;
3994{
3995  tree elt;
3996
3997  switch (TREE_CODE (exp))
3998    {
3999    case CONVERT_EXPR:
4000    case NOP_EXPR:
4001    case NON_LVALUE_EXPR:
4002      return is_zeros_p (TREE_OPERAND (exp, 0));
4003
4004    case INTEGER_CST:
4005      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
4006
4007    case COMPLEX_CST:
4008      return
4009	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4010
4011    case REAL_CST:
4012      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4013
4014    case CONSTRUCTOR:
4015      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4016	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4017      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4018	if (! is_zeros_p (TREE_VALUE (elt)))
4019	  return 0;
4020
4021      return 1;
4022
4023    default:
4024      return 0;
4025    }
4026}
4027
4028/* Return 1 if EXP contains mostly (3/4)  zeros.  */
4029
4030static int
4031mostly_zeros_p (exp)
4032     tree exp;
4033{
4034  if (TREE_CODE (exp) == CONSTRUCTOR)
4035    {
4036      int elts = 0, zeros = 0;
4037      tree elt = CONSTRUCTOR_ELTS (exp);
4038      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4039	{
4040	  /* If there are no ranges of true bits, it is all zero.  */
4041	  return elt == NULL_TREE;
4042	}
4043      for (; elt; elt = TREE_CHAIN (elt))
4044	{
4045	  /* We do not handle the case where the index is a RANGE_EXPR,
4046	     so the statistic will be somewhat inaccurate.
4047	     We do make a more accurate count in store_constructor itself,
4048	     so since this function is only used for nested array elements,
4049	     this should be close enough.  */
4050	  if (mostly_zeros_p (TREE_VALUE (elt)))
4051	    zeros++;
4052	  elts++;
4053	}
4054
4055      return 4 * zeros >= 3 * elts;
4056    }
4057
4058  return is_zeros_p (exp);
4059}
4060
4061/* Helper function for store_constructor.
4062   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4063   TYPE is the type of the CONSTRUCTOR, not the element type.
4064   CLEARED is as for store_constructor.
4065
4066   This provides a recursive shortcut back to store_constructor when it isn't
4067   necessary to go through store_field.  This is so that we can pass through
4068   the cleared field to let store_constructor know that we may not have to
4069   clear a substructure if the outer structure has already been cleared.  */
4070
4071static void
4072store_constructor_field (target, bitsize, bitpos,
4073			 mode, exp, type, cleared)
4074     rtx target;
4075     int bitsize, bitpos;
4076     enum machine_mode mode;
4077     tree exp, type;
4078     int cleared;
4079{
4080  if (TREE_CODE (exp) == CONSTRUCTOR
4081      && bitpos % BITS_PER_UNIT == 0
4082      /* If we have a non-zero bitpos for a register target, then we just
4083	 let store_field do the bitfield handling.  This is unlikely to
4084	 generate unnecessary clear instructions anyways.  */
4085      && (bitpos == 0 || GET_CODE (target) == MEM))
4086    {
4087      if (bitpos != 0)
4088	target = change_address (target, VOIDmode,
4089				 plus_constant (XEXP (target, 0),
4090						bitpos / BITS_PER_UNIT));
4091      store_constructor (exp, target, cleared);
4092    }
4093  else
4094    store_field (target, bitsize, bitpos, mode, exp,
4095		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
4096		 int_size_in_bytes (type), 0);
4097}
4098
4099/* Store the value of constructor EXP into the rtx TARGET.
4100   TARGET is either a REG or a MEM.
4101   CLEARED is true if TARGET is known to have been zero'd.  */
4102
4103static void
4104store_constructor (exp, target, cleared)
4105     tree exp;
4106     rtx target;
4107     int cleared;
4108{
4109  tree type = TREE_TYPE (exp);
4110  rtx exp_size = expr_size (exp);
4111
4112  /* We know our target cannot conflict, since safe_from_p has been called.  */
4113#if 0
4114  /* Don't try copying piece by piece into a hard register
4115     since that is vulnerable to being clobbered by EXP.
4116     Instead, construct in a pseudo register and then copy it all.  */
4117  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4118    {
4119      rtx temp = gen_reg_rtx (GET_MODE (target));
4120      store_constructor (exp, temp, 0);
4121      emit_move_insn (target, temp);
4122      return;
4123    }
4124#endif
4125
4126  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4127      || TREE_CODE (type) == QUAL_UNION_TYPE)
4128    {
4129      register tree elt;
4130
4131      /* Inform later passes that the whole union value is dead.  */
4132      if (TREE_CODE (type) == UNION_TYPE
4133	  || TREE_CODE (type) == QUAL_UNION_TYPE)
4134	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4135
4136      /* If we are building a static constructor into a register,
4137	 set the initial value as zero so we can fold the value into
4138	 a constant.  But if more than one register is involved,
4139	 this probably loses.  */
4140      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4142	{
4143	  if (! cleared)
4144	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4145
4146	  cleared = 1;
4147	}
4148
4149      /* If the constructor has fewer fields than the structure
4150	 or if we are initializing the structure to mostly zeros,
4151	 clear the whole structure first.  */
4152      else if ((list_length (CONSTRUCTOR_ELTS (exp))
4153		!= list_length (TYPE_FIELDS (type)))
4154	       || mostly_zeros_p (exp))
4155	{
4156	  if (! cleared)
4157	    clear_storage (target, expr_size (exp),
4158			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4159
4160	  cleared = 1;
4161	}
4162      else
4163	/* Inform later passes that the old value is dead.  */
4164	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4165
4166      /* Store each element of the constructor into
4167	 the corresponding field of TARGET.  */
4168
4169      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4170	{
4171	  register tree field = TREE_PURPOSE (elt);
4172	  tree value = TREE_VALUE (elt);
4173	  register enum machine_mode mode;
4174	  int bitsize;
4175	  int bitpos = 0;
4176	  int unsignedp;
4177	  tree pos, constant = 0, offset = 0;
4178	  rtx to_rtx = target;
4179
4180	  /* Just ignore missing fields.
4181	     We cleared the whole structure, above,
4182	     if any fields are missing.  */
4183	  if (field == 0)
4184	    continue;
4185
4186	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
4187	    continue;
4188
4189	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4190	  unsignedp = TREE_UNSIGNED (field);
4191	  mode = DECL_MODE (field);
4192	  if (DECL_BIT_FIELD (field))
4193	    mode = VOIDmode;
4194
4195	  pos = DECL_FIELD_BITPOS (field);
4196	  if (TREE_CODE (pos) == INTEGER_CST)
4197	    constant = pos;
4198	  else if (TREE_CODE (pos) == PLUS_EXPR
4199		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4200	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4201	  else
4202	    offset = pos;
4203
4204	  if (constant)
4205	    bitpos = TREE_INT_CST_LOW (constant);
4206
4207	  if (offset)
4208	    {
4209	      rtx offset_rtx;
4210
4211	      if (contains_placeholder_p (offset))
4212		offset = build (WITH_RECORD_EXPR, sizetype,
4213				offset, make_tree (TREE_TYPE (exp), target));
4214
4215	      offset = size_binop (FLOOR_DIV_EXPR, offset,
4216				   size_int (BITS_PER_UNIT));
4217
4218	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4219	      if (GET_CODE (to_rtx) != MEM)
4220		abort ();
4221
4222              if (GET_MODE (offset_rtx) != ptr_mode)
4223                {
4224#ifdef POINTERS_EXTEND_UNSIGNED
4225                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4226#else
4227                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4228#endif
4229                }
4230
4231	      to_rtx
4232		= change_address (to_rtx, VOIDmode,
4233				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4234					   force_reg (ptr_mode, offset_rtx)));
4235	    }
4236	  if (TREE_READONLY (field))
4237	    {
4238	      if (GET_CODE (to_rtx) == MEM)
4239		to_rtx = copy_rtx (to_rtx);
4240
4241	      RTX_UNCHANGING_P (to_rtx) = 1;
4242	    }
4243
4244#ifdef WORD_REGISTER_OPERATIONS
4245	  /* If this initializes a field that is smaller than a word, at the
4246	     start of a word, try to widen it to a full word.
4247	     This special case allows us to output C++ member function
4248	     initializations in a form that the optimizers can understand.  */
4249	  if (constant
4250	      && GET_CODE (target) == REG
4251	      && bitsize < BITS_PER_WORD
4252	      && bitpos % BITS_PER_WORD == 0
4253	      && GET_MODE_CLASS (mode) == MODE_INT
4254	      && TREE_CODE (value) == INTEGER_CST
4255	      && GET_CODE (exp_size) == CONST_INT
4256	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4257	    {
4258	      tree type = TREE_TYPE (value);
4259	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4260		{
4261		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4262		  value = convert (type, value);
4263		}
4264	      if (BYTES_BIG_ENDIAN)
4265		value
4266		  = fold (build (LSHIFT_EXPR, type, value,
4267				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4268	      bitsize = BITS_PER_WORD;
4269	      mode = word_mode;
4270	    }
4271#endif
4272	  store_constructor_field (to_rtx, bitsize, bitpos,
4273				   mode, value, type, cleared);
4274	}
4275    }
4276  else if (TREE_CODE (type) == ARRAY_TYPE)
4277    {
4278      register tree elt;
4279      register int i;
4280      int need_to_clear;
4281      tree domain = TYPE_DOMAIN (type);
4282      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4283      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4284      tree elttype = TREE_TYPE (type);
4285
4286      /* If the constructor has fewer elements than the array,
4287         clear the whole array first.  Similarly if this is
4288         static constructor of a non-BLKmode object.  */
4289      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4290	need_to_clear = 1;
4291      else
4292	{
4293	  HOST_WIDE_INT count = 0, zero_count = 0;
4294	  need_to_clear = 0;
4295	  /* This loop is a more accurate version of the loop in
4296	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4297	     It is also needed to check for missing elements.  */
4298	  for (elt = CONSTRUCTOR_ELTS (exp);
4299	       elt != NULL_TREE;
4300	       elt = TREE_CHAIN (elt))
4301	    {
4302	      tree index = TREE_PURPOSE (elt);
4303	      HOST_WIDE_INT this_node_count;
4304	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4305		{
4306		  tree lo_index = TREE_OPERAND (index, 0);
4307		  tree hi_index = TREE_OPERAND (index, 1);
4308		  if (TREE_CODE (lo_index) != INTEGER_CST
4309		      || TREE_CODE (hi_index) != INTEGER_CST)
4310		    {
4311		      need_to_clear = 1;
4312		      break;
4313		    }
4314		  this_node_count = TREE_INT_CST_LOW (hi_index)
4315		    - TREE_INT_CST_LOW (lo_index) + 1;
4316		}
4317	      else
4318		this_node_count = 1;
4319	      count += this_node_count;
4320	      if (mostly_zeros_p (TREE_VALUE (elt)))
4321		zero_count += this_node_count;
4322	    }
4323	  /* Clear the entire array first if there are any missing elements,
4324	     or if the incidence of zero elements is >= 75%.  */
4325	  if (count < maxelt - minelt + 1
4326	      || 4 * zero_count >= 3 * count)
4327	    need_to_clear = 1;
4328	}
4329      if (need_to_clear)
4330	{
4331	  if (! cleared)
4332	    clear_storage (target, expr_size (exp),
4333			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4334	  cleared = 1;
4335	}
4336      else
4337	/* Inform later passes that the old value is dead.  */
4338	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4339
4340      /* Store each element of the constructor into
4341	 the corresponding element of TARGET, determined
4342	 by counting the elements.  */
4343      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4344	   elt;
4345	   elt = TREE_CHAIN (elt), i++)
4346	{
4347	  register enum machine_mode mode;
4348	  int bitsize;
4349	  int bitpos;
4350	  int unsignedp;
4351	  tree value = TREE_VALUE (elt);
4352	  tree index = TREE_PURPOSE (elt);
4353	  rtx xtarget = target;
4354
4355	  if (cleared && is_zeros_p (value))
4356	    continue;
4357
4358	  mode = TYPE_MODE (elttype);
4359	  bitsize = GET_MODE_BITSIZE (mode);
4360	  unsignedp = TREE_UNSIGNED (elttype);
4361
4362	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4363	    {
4364	      tree lo_index = TREE_OPERAND (index, 0);
4365	      tree hi_index = TREE_OPERAND (index, 1);
4366	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4367	      struct nesting *loop;
4368	      HOST_WIDE_INT lo, hi, count;
4369	      tree position;
4370
4371	      /* If the range is constant and "small", unroll the loop.  */
4372	      if (TREE_CODE (lo_index) == INTEGER_CST
4373		  && TREE_CODE (hi_index) == INTEGER_CST
4374		  && (lo = TREE_INT_CST_LOW (lo_index),
4375		      hi = TREE_INT_CST_LOW (hi_index),
4376		      count = hi - lo + 1,
4377		      (GET_CODE (target) != MEM
4378		       || count <= 2
4379		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4380			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4381			   <= 40 * 8))))
4382		{
4383		  lo -= minelt;  hi -= minelt;
4384		  for (; lo <= hi; lo++)
4385		    {
4386		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4387		      store_constructor_field (target, bitsize, bitpos,
4388					       mode, value, type, cleared);
4389		    }
4390		}
4391	      else
4392		{
4393		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4394		  loop_top = gen_label_rtx ();
4395		  loop_end = gen_label_rtx ();
4396
4397		  unsignedp = TREE_UNSIGNED (domain);
4398
4399		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4400
4401		  DECL_RTL (index) = index_r
4402		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4403						 &unsignedp, 0));
4404
4405		  if (TREE_CODE (value) == SAVE_EXPR
4406		      && SAVE_EXPR_RTL (value) == 0)
4407		    {
4408		      /* Make sure value gets expanded once before the
4409                         loop.  */
4410		      expand_expr (value, const0_rtx, VOIDmode, 0);
4411		      emit_queue ();
4412		    }
4413		  store_expr (lo_index, index_r, 0);
4414		  loop = expand_start_loop (0);
4415
4416		  /* Assign value to element index.  */
4417		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4418					 size_int (BITS_PER_UNIT));
4419		  position = size_binop (MULT_EXPR,
4420					 size_binop (MINUS_EXPR, index,
4421						     TYPE_MIN_VALUE (domain)),
4422					 position);
4423		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4424		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4425		  xtarget = change_address (target, mode, addr);
4426		  if (TREE_CODE (value) == CONSTRUCTOR)
4427		    store_constructor (value, xtarget, cleared);
4428		  else
4429		    store_expr (value, xtarget, 0);
4430
4431		  expand_exit_loop_if_false (loop,
4432					     build (LT_EXPR, integer_type_node,
4433						    index, hi_index));
4434
4435		  expand_increment (build (PREINCREMENT_EXPR,
4436					   TREE_TYPE (index),
4437					   index, integer_one_node), 0, 0);
4438		  expand_end_loop ();
4439		  emit_label (loop_end);
4440
4441		  /* Needed by stupid register allocation. to extend the
4442		     lifetime of pseudo-regs used by target past the end
4443		     of the loop.  */
4444		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
4445		}
4446	    }
4447	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4448	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4449	    {
4450	      rtx pos_rtx, addr;
4451	      tree position;
4452
4453	      if (index == 0)
4454		index = size_int (i);
4455
4456	      if (minelt)
4457		index = size_binop (MINUS_EXPR, index,
4458				    TYPE_MIN_VALUE (domain));
4459	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4460				     size_int (BITS_PER_UNIT));
4461	      position = size_binop (MULT_EXPR, index, position);
4462	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4463	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4464	      xtarget = change_address (target, mode, addr);
4465	      store_expr (value, xtarget, 0);
4466	    }
4467	  else
4468	    {
4469	      if (index != 0)
4470		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4471			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4472	      else
4473		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4474	      store_constructor_field (target, bitsize, bitpos,
4475				       mode, value, type, cleared);
4476	    }
4477	}
4478    }
4479  /* set constructor assignments */
4480  else if (TREE_CODE (type) == SET_TYPE)
4481    {
4482      tree elt = CONSTRUCTOR_ELTS (exp);
4483      int nbytes = int_size_in_bytes (type), nbits;
4484      tree domain = TYPE_DOMAIN (type);
4485      tree domain_min, domain_max, bitlength;
4486
4487      /* The default implementation strategy is to extract the constant
4488	 parts of the constructor, use that to initialize the target,
4489	 and then "or" in whatever non-constant ranges we need in addition.
4490
4491	 If a large set is all zero or all ones, it is
4492	 probably better to set it using memset (if available) or bzero.
4493	 Also, if a large set has just a single range, it may also be
4494	 better to first clear all the first clear the set (using
4495	 bzero/memset), and set the bits we want.  */
4496
4497      /* Check for all zeros.  */
4498      if (elt == NULL_TREE)
4499	{
4500	  if (!cleared)
4501	    clear_storage (target, expr_size (exp),
4502			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4503	  return;
4504	}
4505
4506      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4507      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4508      bitlength = size_binop (PLUS_EXPR,
4509			      size_binop (MINUS_EXPR, domain_max, domain_min),
4510			      size_one_node);
4511
4512      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4513	abort ();
4514      nbits = TREE_INT_CST_LOW (bitlength);
4515
4516      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4517	 are "complicated" (more than one range), initialize (the
4518	 constant parts) by copying from a constant.  */
4519      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4520	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4521	{
4522	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4523	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4524	  char *bit_buffer = (char *) alloca (nbits);
4525	  HOST_WIDE_INT word = 0;
4526	  int bit_pos = 0;
4527	  int ibit = 0;
4528	  int offset = 0;  /* In bytes from beginning of set.  */
4529	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4530	  for (;;)
4531	    {
4532	      if (bit_buffer[ibit])
4533		{
4534		  if (BYTES_BIG_ENDIAN)
4535		    word |= (1 << (set_word_size - 1 - bit_pos));
4536		  else
4537		    word |= 1 << bit_pos;
4538		}
4539	      bit_pos++;  ibit++;
4540	      if (bit_pos >= set_word_size || ibit == nbits)
4541		{
4542		  if (word != 0 || ! cleared)
4543		    {
4544		      rtx datum = GEN_INT (word);
4545		      rtx to_rtx;
4546		      /* The assumption here is that it is safe to use
4547			 XEXP if the set is multi-word, but not if
4548			 it's single-word.  */
4549		      if (GET_CODE (target) == MEM)
4550			{
4551			  to_rtx = plus_constant (XEXP (target, 0), offset);
4552			  to_rtx = change_address (target, mode, to_rtx);
4553			}
4554		      else if (offset == 0)
4555			to_rtx = target;
4556		      else
4557			abort ();
4558		      emit_move_insn (to_rtx, datum);
4559		    }
4560		  if (ibit == nbits)
4561		    break;
4562		  word = 0;
4563		  bit_pos = 0;
4564		  offset += set_word_size / BITS_PER_UNIT;
4565		}
4566	    }
4567	}
4568      else if (!cleared)
4569	{
4570	  /* Don't bother clearing storage if the set is all ones.  */
4571	  if (TREE_CHAIN (elt) != NULL_TREE
4572	      || (TREE_PURPOSE (elt) == NULL_TREE
4573		  ? nbits != 1
4574		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4575		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4576		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4577			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4578			 != nbits))))
4579	    clear_storage (target, expr_size (exp),
4580			   TYPE_ALIGN (type) / BITS_PER_UNIT);
4581	}
4582
4583      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4584	{
4585	  /* start of range of element or NULL */
4586	  tree startbit = TREE_PURPOSE (elt);
4587	  /* end of range of element, or element value */
4588	  tree endbit   = TREE_VALUE (elt);
4589#ifdef TARGET_MEM_FUNCTIONS
4590	  HOST_WIDE_INT startb, endb;
4591#endif
4592	  rtx  bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4593
4594	  bitlength_rtx = expand_expr (bitlength,
4595			    NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4596
4597	  /* handle non-range tuple element like [ expr ]  */
4598	  if (startbit == NULL_TREE)
4599	    {
4600	      startbit = save_expr (endbit);
4601	      endbit = startbit;
4602	    }
4603	  startbit = convert (sizetype, startbit);
4604	  endbit = convert (sizetype, endbit);
4605	  if (! integer_zerop (domain_min))
4606	    {
4607	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4608	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4609	    }
4610	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4611				      EXPAND_CONST_ADDRESS);
4612	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4613				    EXPAND_CONST_ADDRESS);
4614
4615	  if (REG_P (target))
4616	    {
4617	      targetx = assign_stack_temp (GET_MODE (target),
4618					   GET_MODE_SIZE (GET_MODE (target)),
4619					   0);
4620	      emit_move_insn (targetx, target);
4621	    }
4622	  else if (GET_CODE (target) == MEM)
4623	    targetx = target;
4624	  else
4625	    abort ();
4626
4627#ifdef TARGET_MEM_FUNCTIONS
4628	  /* Optimization:  If startbit and endbit are
4629	     constants divisible by BITS_PER_UNIT,
4630	     call memset instead.  */
4631	  if (TREE_CODE (startbit) == INTEGER_CST
4632	      && TREE_CODE (endbit) == INTEGER_CST
4633	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4634	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4635	    {
4636	      emit_library_call (memset_libfunc, 0,
4637				 VOIDmode, 3,
4638				 plus_constant (XEXP (targetx, 0),
4639						startb / BITS_PER_UNIT),
4640				 Pmode,
4641				 constm1_rtx, TYPE_MODE (integer_type_node),
4642				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4643				 TYPE_MODE (sizetype));
4644	    }
4645	  else
4646#endif
4647	    {
4648	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4649				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4650				 bitlength_rtx, TYPE_MODE (sizetype),
4651				 startbit_rtx, TYPE_MODE (sizetype),
4652				 endbit_rtx, TYPE_MODE (sizetype));
4653	    }
4654	  if (REG_P (target))
4655	    emit_move_insn (target, targetx);
4656	}
4657    }
4658
4659  else
4660    abort ();
4661}
4662
4663/* Store the value of EXP (an expression tree)
4664   into a subfield of TARGET which has mode MODE and occupies
4665   BITSIZE bits, starting BITPOS bits from the start of TARGET.
4666   If MODE is VOIDmode, it means that we are storing into a bit-field.
4667
4668   If VALUE_MODE is VOIDmode, return nothing in particular.
4669   UNSIGNEDP is not used in this case.
4670
4671   Otherwise, return an rtx for the value stored.  This rtx
4672   has mode VALUE_MODE if that is convenient to do.
4673   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4674
4675   ALIGN is the alignment that TARGET is known to have, measured in bytes.
4676   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4677
4678   ALIAS_SET is the alias set for the destination.  This value will
4679   (in general) be different from that for TARGET, since TARGET is a
4680   reference to the containing structure.  */
4681
4682static rtx
4683store_field (target, bitsize, bitpos, mode, exp, value_mode,
4684	     unsignedp, align, total_size, alias_set)
4685     rtx target;
4686     int bitsize, bitpos;
4687     enum machine_mode mode;
4688     tree exp;
4689     enum machine_mode value_mode;
4690     int unsignedp;
4691     int align;
4692     int total_size;
4693     int alias_set;
4694{
4695  HOST_WIDE_INT width_mask = 0;
4696
4697  if (TREE_CODE (exp) == ERROR_MARK)
4698    return const0_rtx;
4699
4700  if (bitsize < HOST_BITS_PER_WIDE_INT)
4701    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4702
4703  /* If we are storing into an unaligned field of an aligned union that is
4704     in a register, we may have the mode of TARGET being an integer mode but
4705     MODE == BLKmode.  In that case, get an aligned object whose size and
4706     alignment are the same as TARGET and store TARGET into it (we can avoid
4707     the store if the field being stored is the entire width of TARGET).  Then
4708     call ourselves recursively to store the field into a BLKmode version of
4709     that object.  Finally, load from the object into TARGET.  This is not
4710     very efficient in general, but should only be slightly more expensive
4711     than the otherwise-required unaligned accesses.  Perhaps this can be
4712     cleaned up later.  */
4713
4714  if (mode == BLKmode
4715      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4716    {
4717      rtx object = assign_stack_temp (GET_MODE (target),
4718				      GET_MODE_SIZE (GET_MODE (target)), 0);
4719      rtx blk_object = copy_rtx (object);
4720
4721      MEM_SET_IN_STRUCT_P (object, 1);
4722      MEM_SET_IN_STRUCT_P (blk_object, 1);
4723      PUT_MODE (blk_object, BLKmode);
4724
4725      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4726	emit_move_insn (object, target);
4727
4728      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4729		   align, total_size, alias_set);
4730
4731      /* Even though we aren't returning target, we need to
4732	 give it the updated value.  */
4733      emit_move_insn (target, object);
4734
4735      return blk_object;
4736    }
4737
4738  /* If the structure is in a register or if the component
4739     is a bit field, we cannot use addressing to access it.
4740     Use bit-field techniques or SUBREG to store in it.  */
4741
4742  if (mode == VOIDmode
4743      || (mode != BLKmode && ! direct_store[(int) mode]
4744	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4745	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4746      || GET_CODE (target) == REG
4747      || GET_CODE (target) == SUBREG
4748      /* If the field isn't aligned enough to store as an ordinary memref,
4749	 store it as a bit field.  */
4750      || (SLOW_UNALIGNED_ACCESS
4751	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4752      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4753    {
4754      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4755
4756      /* If BITSIZE is narrower than the size of the type of EXP
4757	 we will be narrowing TEMP.  Normally, what's wanted are the
4758	 low-order bits.  However, if EXP's type is a record and this is
4759	 big-endian machine, we want the upper BITSIZE bits.  */
4760      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4761	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4762	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4763	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4764			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4765				       - bitsize),
4766			     temp, 1);
4767
4768      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4769	 MODE.  */
4770      if (mode != VOIDmode && mode != BLKmode
4771	  && mode != TYPE_MODE (TREE_TYPE (exp)))
4772	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4773
4774      /* If the modes of TARGET and TEMP are both BLKmode, both
4775	 must be in memory and BITPOS must be aligned on a byte
4776	 boundary.  If so, we simply do a block copy.  */
4777      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4778	{
4779	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4780	      || bitpos % BITS_PER_UNIT != 0)
4781	    abort ();
4782
4783	  target = change_address (target, VOIDmode,
4784				   plus_constant (XEXP (target, 0),
4785						bitpos / BITS_PER_UNIT));
4786
4787	  emit_block_move (target, temp,
4788			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4789				    / BITS_PER_UNIT),
4790			   1);
4791
4792	  return value_mode == VOIDmode ? const0_rtx : target;
4793	}
4794
4795      /* Store the value in the bitfield.  */
4796      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4797      if (value_mode != VOIDmode)
4798	{
4799	  /* The caller wants an rtx for the value.  */
4800	  /* If possible, avoid refetching from the bitfield itself.  */
4801	  if (width_mask != 0
4802	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4803	    {
4804	      tree count;
4805	      enum machine_mode tmode;
4806
4807	      if (unsignedp)
4808		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4809	      tmode = GET_MODE (temp);
4810	      if (tmode == VOIDmode)
4811		tmode = value_mode;
4812	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4813	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4814	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4815	    }
4816	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
4817				    NULL_RTX, value_mode, 0, align,
4818				    total_size);
4819	}
4820      return const0_rtx;
4821    }
4822  else
4823    {
4824      rtx addr = XEXP (target, 0);
4825      rtx to_rtx;
4826
4827      /* If a value is wanted, it must be the lhs;
4828	 so make the address stable for multiple use.  */
4829
4830      if (value_mode != VOIDmode && GET_CODE (addr) != REG
4831	  && ! CONSTANT_ADDRESS_P (addr)
4832	  /* A frame-pointer reference is already stable.  */
4833	  && ! (GET_CODE (addr) == PLUS
4834		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
4835		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
4836		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4837	addr = copy_to_reg (addr);
4838
4839      /* Now build a reference to just the desired component.  */
4840
4841      to_rtx = copy_rtx (change_address (target, mode,
4842					 plus_constant (addr,
4843							(bitpos
4844							 / BITS_PER_UNIT))));
4845      MEM_SET_IN_STRUCT_P (to_rtx, 1);
4846      MEM_ALIAS_SET (to_rtx) = alias_set;
4847
4848      return store_expr (exp, to_rtx, value_mode != VOIDmode);
4849    }
4850}
4851
4852/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4853   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4854   ARRAY_REFs and find the ultimate containing object, which we return.
4855
4856   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4857   bit position, and *PUNSIGNEDP to the signedness of the field.
4858   If the position of the field is variable, we store a tree
4859   giving the variable offset (in units) in *POFFSET.
4860   This offset is in addition to the bit position.
4861   If the position is not variable, we store 0 in *POFFSET.
4862   We set *PALIGNMENT to the alignment in bytes of the address that will be
4863   computed.  This is the alignment of the thing we return if *POFFSET
4864   is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4865
4866   If any of the extraction expressions is volatile,
4867   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
4868
4869   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
4870   is a mode that can be used to access the field.  In that case, *PBITSIZE
4871   is redundant.
4872
4873   If the field describes a variable-sized object, *PMODE is set to
4874   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
4875   this case, but the address of the object can be found.   */
4876
4877tree
4878get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4879		     punsignedp, pvolatilep, palignment)
4880     tree exp;
4881     int *pbitsize;
4882     int *pbitpos;
4883     tree *poffset;
4884     enum machine_mode *pmode;
4885     int *punsignedp;
4886     int *pvolatilep;
4887     int *palignment;
4888{
4889  tree orig_exp = exp;
4890  tree size_tree = 0;
4891  enum machine_mode mode = VOIDmode;
4892  tree offset = integer_zero_node;
4893  unsigned int alignment = BIGGEST_ALIGNMENT;
4894
4895  if (TREE_CODE (exp) == COMPONENT_REF)
4896    {
4897      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4898      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4899	mode = DECL_MODE (TREE_OPERAND (exp, 1));
4900      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4901    }
4902  else if (TREE_CODE (exp) == BIT_FIELD_REF)
4903    {
4904      size_tree = TREE_OPERAND (exp, 1);
4905      *punsignedp = TREE_UNSIGNED (exp);
4906    }
4907  else
4908    {
4909      mode = TYPE_MODE (TREE_TYPE (exp));
4910      if (mode == BLKmode)
4911	size_tree = TYPE_SIZE (TREE_TYPE (exp));
4912
4913      *pbitsize = GET_MODE_BITSIZE (mode);
4914      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4915    }
4916
4917  if (size_tree)
4918    {
4919      if (TREE_CODE (size_tree) != INTEGER_CST)
4920	mode = BLKmode, *pbitsize = -1;
4921      else
4922	*pbitsize = TREE_INT_CST_LOW (size_tree);
4923    }
4924
4925  /* Compute cumulative bit-offset for nested component-refs and array-refs,
4926     and find the ultimate containing object.  */
4927
4928  *pbitpos = 0;
4929
4930  while (1)
4931    {
4932      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4933	{
4934	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
4935		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4936		      : TREE_OPERAND (exp, 2));
4937	  tree constant = integer_zero_node, var = pos;
4938
4939	  /* If this field hasn't been filled in yet, don't go
4940	     past it.  This should only happen when folding expressions
4941	     made during type construction.  */
4942	  if (pos == 0)
4943	    break;
4944
4945	  /* Assume here that the offset is a multiple of a unit.
4946	     If not, there should be an explicitly added constant.  */
4947	  if (TREE_CODE (pos) == PLUS_EXPR
4948	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4949	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4950	  else if (TREE_CODE (pos) == INTEGER_CST)
4951	    constant = pos, var = integer_zero_node;
4952
4953	  *pbitpos += TREE_INT_CST_LOW (constant);
4954	  offset = size_binop (PLUS_EXPR, offset,
4955			       size_binop (EXACT_DIV_EXPR, var,
4956					   size_int (BITS_PER_UNIT)));
4957	}
4958
4959      else if (TREE_CODE (exp) == ARRAY_REF)
4960	{
4961	  /* This code is based on the code in case ARRAY_REF in expand_expr
4962	     below.  We assume here that the size of an array element is
4963	     always an integral multiple of BITS_PER_UNIT.  */
4964
4965	  tree index = TREE_OPERAND (exp, 1);
4966	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4967	  tree low_bound
4968	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4969	  tree index_type = TREE_TYPE (index);
4970	  tree xindex;
4971
4972	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4973	    {
4974	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4975			       index);
4976	      index_type = TREE_TYPE (index);
4977	    }
4978
4979	  /* Optimize the special-case of a zero lower bound.
4980
4981	     We convert the low_bound to sizetype to avoid some problems
4982	     with constant folding.  (E.g. suppose the lower bound is 1,
4983	     and its mode is QI.  Without the conversion,  (ARRAY
4984	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4985	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
4986
4987	     But sizetype isn't quite right either (especially if
4988	     the lowbound is negative).  FIXME */
4989
4990	  if (! integer_zerop (low_bound))
4991	    index = fold (build (MINUS_EXPR, index_type, index,
4992				 convert (sizetype, low_bound)));
4993
4994	  if (TREE_CODE (index) == INTEGER_CST)
4995	    {
4996	      index = convert (sbitsizetype, index);
4997	      index_type = TREE_TYPE (index);
4998	    }
4999
5000	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5001			        convert (sbitsizetype,
5002					 TYPE_SIZE (TREE_TYPE (exp)))));
5003
5004	  if (TREE_CODE (xindex) == INTEGER_CST
5005	      && TREE_INT_CST_HIGH (xindex) == 0)
5006	    *pbitpos += TREE_INT_CST_LOW (xindex);
5007	  else
5008	    {
5009	      /* Either the bit offset calculated above is not constant, or
5010		 it overflowed.  In either case, redo the multiplication
5011		 against the size in units.  This is especially important
5012		 in the non-constant case to avoid a division at runtime.  */
5013	      xindex = fold (build (MULT_EXPR, ssizetype, index,
5014                                    convert (ssizetype,
5015                                         TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5016
5017	      if (contains_placeholder_p (xindex))
5018		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5019
5020	      offset = size_binop (PLUS_EXPR, offset, xindex);
5021	    }
5022	}
5023      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5024	       && ! ((TREE_CODE (exp) == NOP_EXPR
5025		      || TREE_CODE (exp) == CONVERT_EXPR)
5026		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
5027			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5028			       != UNION_TYPE))
5029		     && (TYPE_MODE (TREE_TYPE (exp))
5030			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5031	break;
5032
5033      /* If any reference in the chain is volatile, the effect is volatile.  */
5034      if (TREE_THIS_VOLATILE (exp))
5035	*pvolatilep = 1;
5036
5037      /* If the offset is non-constant already, then we can't assume any
5038	 alignment more than the alignment here.  */
5039      if (! integer_zerop (offset))
5040	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5041
5042      exp = TREE_OPERAND (exp, 0);
5043    }
5044
5045  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5046    alignment = MIN (alignment, DECL_ALIGN (exp));
5047  else if (TREE_TYPE (exp) != 0)
5048    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5049
5050  if (integer_zerop (offset))
5051    offset = 0;
5052
5053  if (offset != 0 && contains_placeholder_p (offset))
5054    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5055
5056  *pmode = mode;
5057  *poffset = offset;
5058  *palignment = alignment / BITS_PER_UNIT;
5059  return exp;
5060}
5061
5062/* Subroutine of expand_exp: compute memory_usage from modifier.  */
5063static enum memory_use_mode
5064get_memory_usage_from_modifier (modifier)
5065     enum expand_modifier modifier;
5066{
5067  switch (modifier)
5068    {
5069    case EXPAND_NORMAL:
5070    case EXPAND_SUM:
5071      return MEMORY_USE_RO;
5072      break;
5073    case EXPAND_MEMORY_USE_WO:
5074      return MEMORY_USE_WO;
5075      break;
5076    case EXPAND_MEMORY_USE_RW:
5077      return MEMORY_USE_RW;
5078      break;
5079    case EXPAND_MEMORY_USE_DONT:
5080      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5081	 MEMORY_USE_DONT, because they are modifiers to a call of
5082	 expand_expr in the ADDR_EXPR case of expand_expr.  */
5083    case EXPAND_CONST_ADDRESS:
5084    case EXPAND_INITIALIZER:
5085      return MEMORY_USE_DONT;
5086    case EXPAND_MEMORY_USE_BAD:
5087    default:
5088      abort ();
5089    }
5090}
5091
5092/* Given an rtx VALUE that may contain additions and multiplications,
5093   return an equivalent value that just refers to a register or memory.
5094   This is done by generating instructions to perform the arithmetic
5095   and returning a pseudo-register containing the value.
5096
5097   The returned value may be a REG, SUBREG, MEM or constant.  */
5098
5099rtx
5100force_operand (value, target)
5101     rtx value, target;
5102{
5103  register optab binoptab = 0;
5104  /* Use a temporary to force order of execution of calls to
5105     `force_operand'.  */
5106  rtx tmp;
5107  register rtx op2;
5108  /* Use subtarget as the target for operand 0 of a binary operation.  */
5109  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5110
5111  /* Check for a PIC address load.  */
5112  if (flag_pic
5113      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5114      && XEXP (value, 0) == pic_offset_table_rtx
5115      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5116	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
5117	  || GET_CODE (XEXP (value, 1)) == CONST))
5118    {
5119      if (!subtarget)
5120	subtarget = gen_reg_rtx (GET_MODE (value));
5121      emit_move_insn (subtarget, value);
5122      return subtarget;
5123    }
5124
5125  if (GET_CODE (value) == PLUS)
5126    binoptab = add_optab;
5127  else if (GET_CODE (value) == MINUS)
5128    binoptab = sub_optab;
5129  else if (GET_CODE (value) == MULT)
5130    {
5131      op2 = XEXP (value, 1);
5132      if (!CONSTANT_P (op2)
5133	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5134	subtarget = 0;
5135      tmp = force_operand (XEXP (value, 0), subtarget);
5136      return expand_mult (GET_MODE (value), tmp,
5137			  force_operand (op2, NULL_RTX),
5138			  target, 0);
5139    }
5140
5141  if (binoptab)
5142    {
5143      op2 = XEXP (value, 1);
5144      if (!CONSTANT_P (op2)
5145	  && !(GET_CODE (op2) == REG && op2 != subtarget))
5146	subtarget = 0;
5147      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5148	{
5149	  binoptab = add_optab;
5150	  op2 = negate_rtx (GET_MODE (value), op2);
5151	}
5152
5153      /* Check for an addition with OP2 a constant integer and our first
5154	 operand a PLUS of a virtual register and something else.  In that
5155	 case, we want to emit the sum of the virtual register and the
5156	 constant first and then add the other value.  This allows virtual
5157	 register instantiation to simply modify the constant rather than
5158	 creating another one around this addition.  */
5159      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5160	  && GET_CODE (XEXP (value, 0)) == PLUS
5161	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5162	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5163	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5164	{
5165	  rtx temp = expand_binop (GET_MODE (value), binoptab,
5166				   XEXP (XEXP (value, 0), 0), op2,
5167				   subtarget, 0, OPTAB_LIB_WIDEN);
5168	  return expand_binop (GET_MODE (value), binoptab, temp,
5169			       force_operand (XEXP (XEXP (value, 0), 1), 0),
5170			       target, 0, OPTAB_LIB_WIDEN);
5171	}
5172
5173      tmp = force_operand (XEXP (value, 0), subtarget);
5174      return expand_binop (GET_MODE (value), binoptab, tmp,
5175			   force_operand (op2, NULL_RTX),
5176			   target, 0, OPTAB_LIB_WIDEN);
5177      /* We give UNSIGNEDP = 0 to expand_binop
5178	 because the only operations we are expanding here are signed ones.  */
5179    }
5180  return value;
5181}
5182
5183/* Subroutine of expand_expr:
5184   save the non-copied parts (LIST) of an expr (LHS), and return a list
5185   which can restore these values to their previous values,
5186   should something modify their storage.  */
5187
5188static tree
5189save_noncopied_parts (lhs, list)
5190     tree lhs;
5191     tree list;
5192{
5193  tree tail;
5194  tree parts = 0;
5195
5196  for (tail = list; tail; tail = TREE_CHAIN (tail))
5197    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5198      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5199    else
5200      {
5201	tree part = TREE_VALUE (tail);
5202	tree part_type = TREE_TYPE (part);
5203	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5204	rtx target = assign_temp (part_type, 0, 1, 1);
5205	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5206	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5207	parts = tree_cons (to_be_saved,
5208			   build (RTL_EXPR, part_type, NULL_TREE,
5209				  (tree) target),
5210			   parts);
5211	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5212      }
5213  return parts;
5214}
5215
5216/* Subroutine of expand_expr:
5217   record the non-copied parts (LIST) of an expr (LHS), and return a list
5218   which specifies the initial values of these parts.  */
5219
5220static tree
5221init_noncopied_parts (lhs, list)
5222     tree lhs;
5223     tree list;
5224{
5225  tree tail;
5226  tree parts = 0;
5227
5228  for (tail = list; tail; tail = TREE_CHAIN (tail))
5229    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5230      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5231    else if (TREE_PURPOSE (tail))
5232      {
5233	tree part = TREE_VALUE (tail);
5234	tree part_type = TREE_TYPE (part);
5235	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5236	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5237      }
5238  return parts;
5239}
5240
5241/* Subroutine of expand_expr: return nonzero iff there is no way that
5242   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5243   call is going to be used to determine whether we need a temporary
5244   for EXP, as opposed to a recursive call to this function.
5245
5246   It is always safe for this routine to return zero since it merely
5247   searches for optimization opportunities.  */
5248
5249static int
5250safe_from_p (x, exp, top_p)
5251     rtx x;
5252     tree exp;
5253     int top_p;
5254{
5255  rtx exp_rtl = 0;
5256  int i, nops;
5257  static int save_expr_count;
5258  static int save_expr_size = 0;
5259  static tree *save_expr_rewritten;
5260  static tree save_expr_trees[256];
5261
5262  if (x == 0
5263      /* If EXP has varying size, we MUST use a target since we currently
5264	 have no way of allocating temporaries of variable size
5265	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5266	 So we assume here that something at a higher level has prevented a
5267	 clash.  This is somewhat bogus, but the best we can do.  Only
5268	 do this when X is BLKmode and when we are at the top level.  */
5269      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5270	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5271	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5272	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5273	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5274	      != INTEGER_CST)
5275	  && GET_MODE (x) == BLKmode))
5276    return 1;
5277
5278  if (top_p && save_expr_size == 0)
5279    {
5280      int rtn;
5281
5282      save_expr_count = 0;
5283      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5284      save_expr_rewritten = &save_expr_trees[0];
5285
5286      rtn = safe_from_p (x, exp, 1);
5287
5288      for (i = 0; i < save_expr_count; ++i)
5289	{
5290	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5291	    abort ();
5292	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5293	}
5294
5295      save_expr_size = 0;
5296
5297      return rtn;
5298    }
5299
5300  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5301     find the underlying pseudo.  */
5302  if (GET_CODE (x) == SUBREG)
5303    {
5304      x = SUBREG_REG (x);
5305      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5306	return 0;
5307    }
5308
5309  /* If X is a location in the outgoing argument area, it is always safe.  */
5310  if (GET_CODE (x) == MEM
5311      && (XEXP (x, 0) == virtual_outgoing_args_rtx
5312	  || (GET_CODE (XEXP (x, 0)) == PLUS
5313	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5314    return 1;
5315
5316  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5317    {
5318    case 'd':
5319      exp_rtl = DECL_RTL (exp);
5320      break;
5321
5322    case 'c':
5323      return 1;
5324
5325    case 'x':
5326      if (TREE_CODE (exp) == TREE_LIST)
5327	return ((TREE_VALUE (exp) == 0
5328		 || safe_from_p (x, TREE_VALUE (exp), 0))
5329		&& (TREE_CHAIN (exp) == 0
5330		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
5331      else if (TREE_CODE (exp) == ERROR_MARK)
5332	return 1;	/* An already-visited SAVE_EXPR? */
5333      else
5334	return 0;
5335
5336    case '1':
5337      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5338
5339    case '2':
5340    case '<':
5341      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5342	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5343
5344    case 'e':
5345    case 'r':
5346      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
5347	 the expression.  If it is set, we conflict iff we are that rtx or
5348	 both are in memory.  Otherwise, we check all operands of the
5349	 expression recursively.  */
5350
5351      switch (TREE_CODE (exp))
5352	{
5353	case ADDR_EXPR:
5354	  return (staticp (TREE_OPERAND (exp, 0))
5355		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5356		  || TREE_STATIC (exp));
5357
5358	case INDIRECT_REF:
5359	  if (GET_CODE (x) == MEM)
5360	    return 0;
5361	  break;
5362
5363	case CALL_EXPR:
5364	  exp_rtl = CALL_EXPR_RTL (exp);
5365	  if (exp_rtl == 0)
5366	    {
5367	      /* Assume that the call will clobber all hard registers and
5368		 all of memory.  */
5369	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5370		  || GET_CODE (x) == MEM)
5371		return 0;
5372	    }
5373
5374	  break;
5375
5376	case RTL_EXPR:
5377	  /* If a sequence exists, we would have to scan every instruction
5378	     in the sequence to see if it was safe.  This is probably not
5379	     worthwhile.  */
5380	  if (RTL_EXPR_SEQUENCE (exp))
5381	    return 0;
5382
5383	  exp_rtl = RTL_EXPR_RTL (exp);
5384	  break;
5385
5386	case WITH_CLEANUP_EXPR:
5387	  exp_rtl = RTL_EXPR_RTL (exp);
5388	  break;
5389
5390	case CLEANUP_POINT_EXPR:
5391	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5392
5393	case SAVE_EXPR:
5394	  exp_rtl = SAVE_EXPR_RTL (exp);
5395	  if (exp_rtl)
5396	    break;
5397
5398	  /* This SAVE_EXPR might appear many times in the top-level
5399	     safe_from_p() expression, and if it has a complex
5400	     subexpression, examining it multiple times could result
5401	     in a combinatorial explosion.  E.g. on an Alpha
5402	     running at least 200MHz, a Fortran test case compiled with
5403	     optimization took about 28 minutes to compile -- even though
5404	     it was only a few lines long, and the complicated line causing
5405	     so much time to be spent in the earlier version of safe_from_p()
5406	     had only 293 or so unique nodes.
5407
5408	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5409	     where it is so we can turn it back in the top-level safe_from_p()
5410	     when we're done.  */
5411
5412	  /* For now, don't bother re-sizing the array. */
5413	  if (save_expr_count >= save_expr_size)
5414	    return 0;
5415	  save_expr_rewritten[save_expr_count++] = exp;
5416
5417	  nops = tree_code_length[(int) SAVE_EXPR];
5418	  for (i = 0; i < nops; i++)
5419	    {
5420	      tree operand = TREE_OPERAND (exp, i);
5421	      if (operand == NULL_TREE)
5422		continue;
5423	      TREE_SET_CODE (exp, ERROR_MARK);
5424	      if (!safe_from_p (x, operand, 0))
5425		return 0;
5426	      TREE_SET_CODE (exp, SAVE_EXPR);
5427	    }
5428	  TREE_SET_CODE (exp, ERROR_MARK);
5429	  return 1;
5430
5431	case BIND_EXPR:
5432	  /* The only operand we look at is operand 1.  The rest aren't
5433	     part of the expression.  */
5434	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5435
5436	case METHOD_CALL_EXPR:
5437	  /* This takes a rtx argument, but shouldn't appear here.  */
5438	  abort ();
5439
5440	default:
5441	  break;
5442	}
5443
5444      /* If we have an rtx, we do not need to scan our operands.  */
5445      if (exp_rtl)
5446	break;
5447
5448      nops = tree_code_length[(int) TREE_CODE (exp)];
5449      for (i = 0; i < nops; i++)
5450	if (TREE_OPERAND (exp, i) != 0
5451	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5452	  return 0;
5453    }
5454
5455  /* If we have an rtl, find any enclosed object.  Then see if we conflict
5456     with it.  */
5457  if (exp_rtl)
5458    {
5459      if (GET_CODE (exp_rtl) == SUBREG)
5460	{
5461	  exp_rtl = SUBREG_REG (exp_rtl);
5462	  if (GET_CODE (exp_rtl) == REG
5463	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5464	    return 0;
5465	}
5466
5467      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
5468	 are memory and EXP is not readonly.  */
5469      return ! (rtx_equal_p (x, exp_rtl)
5470		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5471		    && ! TREE_READONLY (exp)));
5472    }
5473
5474  /* If we reach here, it is safe.  */
5475  return 1;
5476}
5477
5478/* Subroutine of expand_expr: return nonzero iff EXP is an
5479   expression whose type is statically determinable.  */
5480
5481static int
5482fixed_type_p (exp)
5483     tree exp;
5484{
5485  if (TREE_CODE (exp) == PARM_DECL
5486      || TREE_CODE (exp) == VAR_DECL
5487      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5488      || TREE_CODE (exp) == COMPONENT_REF
5489      || TREE_CODE (exp) == ARRAY_REF)
5490    return 1;
5491  return 0;
5492}
5493
5494/* Subroutine of expand_expr: return rtx if EXP is a
5495   variable or parameter; else return 0.  */
5496
5497static rtx
5498var_rtx (exp)
5499     tree exp;
5500{
5501  STRIP_NOPS (exp);
5502  switch (TREE_CODE (exp))
5503    {
5504    case PARM_DECL:
5505    case VAR_DECL:
5506      return DECL_RTL (exp);
5507    default:
5508      return 0;
5509    }
5510}
5511
5512#ifdef MAX_INTEGER_COMPUTATION_MODE
5513void
5514check_max_integer_computation_mode (exp)
5515    tree exp;
5516{
5517  enum tree_code code = TREE_CODE (exp);
5518  enum machine_mode mode;
5519
5520  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
5521  if (code == NOP_EXPR
5522      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5523    return;
5524
5525  /* First check the type of the overall operation.   We need only look at
5526     unary, binary and relational operations.  */
5527  if (TREE_CODE_CLASS (code) == '1'
5528      || TREE_CODE_CLASS (code) == '2'
5529      || TREE_CODE_CLASS (code) == '<')
5530    {
5531      mode = TYPE_MODE (TREE_TYPE (exp));
5532      if (GET_MODE_CLASS (mode) == MODE_INT
5533	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5534	fatal ("unsupported wide integer operation");
5535    }
5536
5537  /* Check operand of a unary op.  */
5538  if (TREE_CODE_CLASS (code) == '1')
5539    {
5540      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5541      if (GET_MODE_CLASS (mode) == MODE_INT
5542	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5543	fatal ("unsupported wide integer operation");
5544    }
5545
5546  /* Check operands of a binary/comparison op.  */
5547  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5548    {
5549      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5550      if (GET_MODE_CLASS (mode) == MODE_INT
5551	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5552	fatal ("unsupported wide integer operation");
5553
5554      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5555      if (GET_MODE_CLASS (mode) == MODE_INT
5556	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5557	fatal ("unsupported wide integer operation");
5558    }
5559}
5560#endif
5561
5562
5563/* expand_expr: generate code for computing expression EXP.
5564   An rtx for the computed value is returned.  The value is never null.
5565   In the case of a void EXP, const0_rtx is returned.
5566
5567   The value may be stored in TARGET if TARGET is nonzero.
5568   TARGET is just a suggestion; callers must assume that
5569   the rtx returned may not be the same as TARGET.
5570
5571   If TARGET is CONST0_RTX, it means that the value will be ignored.
5572
5573   If TMODE is not VOIDmode, it suggests generating the
5574   result in mode TMODE.  But this is done only when convenient.
5575   Otherwise, TMODE is ignored and the value generated in its natural mode.
5576   TMODE is just a suggestion; callers must assume that
5577   the rtx returned may not have mode TMODE.
5578
5579   Note that TARGET may have neither TMODE nor MODE.  In that case, it
5580   probably will not be used.
5581
5582   If MODIFIER is EXPAND_SUM then when EXP is an addition
5583   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5584   or a nest of (PLUS ...) and (MINUS ...) where the terms are
5585   products as above, or REG or MEM, or constant.
5586   Ordinarily in such cases we would output mul or add instructions
5587   and then return a pseudo reg containing the sum.
5588
5589   EXPAND_INITIALIZER is much like EXPAND_SUM except that
5590   it also marks a label as absolutely required (it can't be dead).
5591   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5592   This is used for outputting expressions used in initializers.
5593
5594   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5595   with a constant address even if that address is not normally legitimate.
5596   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
5597
5598rtx
5599expand_expr (exp, target, tmode, modifier)
5600     register tree exp;
5601     rtx target;
5602     enum machine_mode tmode;
5603     enum expand_modifier modifier;
5604{
5605  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5606     This is static so it will be accessible to our recursive callees.  */
5607  static tree placeholder_list = 0;
5608  register rtx op0, op1, temp;
5609  tree type = TREE_TYPE (exp);
5610  int unsignedp = TREE_UNSIGNED (type);
5611  register enum machine_mode mode;
5612  register enum tree_code code = TREE_CODE (exp);
5613  optab this_optab;
5614  rtx subtarget, original_target;
5615  int ignore;
5616  tree context;
5617  /* Used by check-memory-usage to make modifier read only.  */
5618  enum expand_modifier ro_modifier;
5619
5620  /* Handle ERROR_MARK before anybody tries to access its type. */
5621  if (TREE_CODE (exp) == ERROR_MARK)
5622    {
5623      op0 = CONST0_RTX (tmode);
5624      if (op0 != 0)
5625	return op0;
5626      return const0_rtx;
5627    }
5628
5629  mode = TYPE_MODE (type);
5630  /* Use subtarget as the target for operand 0 of a binary operation.  */
5631  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5632  original_target = target;
5633  ignore = (target == const0_rtx
5634	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5635		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5636		 || code == COND_EXPR)
5637		&& TREE_CODE (type) == VOID_TYPE));
5638
5639  /* Make a read-only version of the modifier.  */
5640  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5641      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5642    ro_modifier = modifier;
5643  else
5644    ro_modifier = EXPAND_NORMAL;
5645
5646  /* Don't use hard regs as subtargets, because the combiner
5647     can only handle pseudo regs.  */
5648  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5649    subtarget = 0;
5650  /* Avoid subtargets inside loops,
5651     since they hide some invariant expressions.  */
5652  if (preserve_subexpressions_p ())
5653    subtarget = 0;
5654
5655  /* If we are going to ignore this result, we need only do something
5656     if there is a side-effect somewhere in the expression.  If there
5657     is, short-circuit the most common cases here.  Note that we must
5658     not call expand_expr with anything but const0_rtx in case this
5659     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
5660
5661  if (ignore)
5662    {
5663      if (! TREE_SIDE_EFFECTS (exp))
5664	return const0_rtx;
5665
5666      /* Ensure we reference a volatile object even if value is ignored.  */
5667      if (TREE_THIS_VOLATILE (exp)
5668	  && TREE_CODE (exp) != FUNCTION_DECL
5669	  && mode != VOIDmode && mode != BLKmode)
5670	{
5671	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5672	  if (GET_CODE (temp) == MEM)
5673	    temp = copy_to_reg (temp);
5674	  return const0_rtx;
5675	}
5676
5677      if (TREE_CODE_CLASS (code) == '1')
5678	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5679			    VOIDmode, ro_modifier);
5680      else if (TREE_CODE_CLASS (code) == '2'
5681	       || TREE_CODE_CLASS (code) == '<')
5682	{
5683	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5684	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5685	  return const0_rtx;
5686	}
5687      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5688	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5689	/* If the second operand has no side effects, just evaluate
5690	   the first.  */
5691	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5692			    VOIDmode, ro_modifier);
5693
5694      target = 0;
5695    }
5696
5697#ifdef MAX_INTEGER_COMPUTATION_MODE
5698  if (target
5699      && TREE_CODE (exp) != INTEGER_CST
5700      && TREE_CODE (exp) != PARM_DECL
5701      && TREE_CODE (exp) != ARRAY_REF
5702      && TREE_CODE (exp) != COMPONENT_REF
5703      && TREE_CODE (exp) != BIT_FIELD_REF
5704      && TREE_CODE (exp) != INDIRECT_REF
5705      && TREE_CODE (exp) != CALL_EXPR
5706      && TREE_CODE (exp) != VAR_DECL)
5707    {
5708      enum machine_mode mode = GET_MODE (target);
5709
5710      if (GET_MODE_CLASS (mode) == MODE_INT
5711	  && mode > MAX_INTEGER_COMPUTATION_MODE)
5712	fatal ("unsupported wide integer operation");
5713    }
5714
5715  if (TREE_CODE (exp) != INTEGER_CST
5716      && TREE_CODE (exp) != PARM_DECL
5717      && TREE_CODE (exp) != ARRAY_REF
5718      && TREE_CODE (exp) != COMPONENT_REF
5719      && TREE_CODE (exp) != BIT_FIELD_REF
5720      && TREE_CODE (exp) != INDIRECT_REF
5721      && TREE_CODE (exp) != VAR_DECL
5722      && TREE_CODE (exp) != CALL_EXPR
5723      && GET_MODE_CLASS (tmode) == MODE_INT
5724      && tmode > MAX_INTEGER_COMPUTATION_MODE)
5725    fatal ("unsupported wide integer operation");
5726
5727  check_max_integer_computation_mode (exp);
5728#endif
5729
5730  /* If will do cse, generate all results into pseudo registers
5731     since 1) that allows cse to find more things
5732     and 2) otherwise cse could produce an insn the machine
5733     cannot support.  */
5734
5735  if (! cse_not_expected && mode != BLKmode && target
5736      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5737    target = subtarget;
5738
5739  switch (code)
5740    {
5741    case LABEL_DECL:
5742      {
5743	tree function = decl_function_context (exp);
5744	/* Handle using a label in a containing function.  */
5745	if (function != current_function_decl
5746	    && function != inline_function_decl && function != 0)
5747	  {
5748	    struct function *p = find_function_data (function);
5749	    /* Allocate in the memory associated with the function
5750	       that the label is in.  */
5751	    push_obstacks (p->function_obstack,
5752			   p->function_maybepermanent_obstack);
5753
5754	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5755						  label_rtx (exp),
5756						  p->forced_labels);
5757	    pop_obstacks ();
5758	  }
5759	else
5760	  {
5761	    if (modifier == EXPAND_INITIALIZER)
5762	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5763						 label_rtx (exp),
5764						 forced_labels);
5765	  }
5766	temp = gen_rtx_MEM (FUNCTION_MODE,
5767			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5768	if (function != current_function_decl
5769	    && function != inline_function_decl && function != 0)
5770	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5771	return temp;
5772      }
5773
5774    case PARM_DECL:
5775      if (DECL_RTL (exp) == 0)
5776	{
5777	  error_with_decl (exp, "prior parameter's size depends on `%s'");
5778	  return CONST0_RTX (mode);
5779	}
5780
5781      /* ... fall through ...  */
5782
5783    case VAR_DECL:
5784      /* If a static var's type was incomplete when the decl was written,
5785	 but the type is complete now, lay out the decl now.  */
5786      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5787	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5788	{
5789	  push_obstacks_nochange ();
5790	  end_temporary_allocation ();
5791	  layout_decl (exp, 0);
5792	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5793	  pop_obstacks ();
5794	}
5795
5796      /* Although static-storage variables start off initialized, according to
5797	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
5798	 we check them too.  This also lets us check for read-only variables
5799	 accessed via a non-const declaration, in case it won't be detected
5800	 any other way (e.g., in an embedded system or OS kernel without
5801	 memory protection).
5802
5803	 Aggregates are not checked here; they're handled elsewhere.  */
5804      if (current_function_check_memory_usage && code == VAR_DECL
5805	  && GET_CODE (DECL_RTL (exp)) == MEM
5806	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5807	{
5808	  enum memory_use_mode memory_usage;
5809	  memory_usage = get_memory_usage_from_modifier (modifier);
5810
5811	  if (memory_usage != MEMORY_USE_DONT)
5812	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5813			       XEXP (DECL_RTL (exp), 0), Pmode,
5814			       GEN_INT (int_size_in_bytes (type)),
5815			       TYPE_MODE (sizetype),
5816			       GEN_INT (memory_usage),
5817			       TYPE_MODE (integer_type_node));
5818	}
5819
5820      /* ... fall through ...  */
5821
5822    case FUNCTION_DECL:
5823    case RESULT_DECL:
5824      if (DECL_RTL (exp) == 0)
5825	abort ();
5826
5827      /* Ensure variable marked as used even if it doesn't go through
5828	 a parser.  If it hasn't be used yet, write out an external
5829	 definition.  */
5830      if (! TREE_USED (exp))
5831	{
5832	  assemble_external (exp);
5833	  TREE_USED (exp) = 1;
5834	}
5835
5836      /* Show we haven't gotten RTL for this yet.  */
5837      temp = 0;
5838
5839      /* Handle variables inherited from containing functions.  */
5840      context = decl_function_context (exp);
5841
5842      /* We treat inline_function_decl as an alias for the current function
5843	 because that is the inline function whose vars, types, etc.
5844	 are being merged into the current function.
5845	 See expand_inline_function.  */
5846
5847      if (context != 0 && context != current_function_decl
5848	  && context != inline_function_decl
5849	  /* If var is static, we don't need a static chain to access it.  */
5850	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
5851		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5852	{
5853	  rtx addr;
5854
5855	  /* Mark as non-local and addressable.  */
5856	  DECL_NONLOCAL (exp) = 1;
5857	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
5858	    abort ();
5859	  mark_addressable (exp);
5860	  if (GET_CODE (DECL_RTL (exp)) != MEM)
5861	    abort ();
5862	  addr = XEXP (DECL_RTL (exp), 0);
5863	  if (GET_CODE (addr) == MEM)
5864	    addr = gen_rtx_MEM (Pmode,
5865				fix_lexical_addr (XEXP (addr, 0), exp));
5866	  else
5867	    addr = fix_lexical_addr (addr, exp);
5868	  temp = change_address (DECL_RTL (exp), mode, addr);
5869	}
5870
5871      /* This is the case of an array whose size is to be determined
5872	 from its initializer, while the initializer is still being parsed.
5873	 See expand_decl.  */
5874
5875      else if (GET_CODE (DECL_RTL (exp)) == MEM
5876	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5877	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5878			       XEXP (DECL_RTL (exp), 0));
5879
5880      /* If DECL_RTL is memory, we are in the normal case and either
5881	 the address is not valid or it is not a register and -fforce-addr
5882	 is specified, get the address into a register.  */
5883
5884      else if (GET_CODE (DECL_RTL (exp)) == MEM
5885	       && modifier != EXPAND_CONST_ADDRESS
5886	       && modifier != EXPAND_SUM
5887	       && modifier != EXPAND_INITIALIZER
5888	       && (! memory_address_p (DECL_MODE (exp),
5889				       XEXP (DECL_RTL (exp), 0))
5890		   || (flag_force_addr
5891		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5892	temp = change_address (DECL_RTL (exp), VOIDmode,
5893			       copy_rtx (XEXP (DECL_RTL (exp), 0)));
5894
5895      /* If we got something, return it.  But first, set the alignment
5896	 the address is a register.  */
5897      if (temp != 0)
5898	{
5899	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5900	    mark_reg_pointer (XEXP (temp, 0),
5901			      DECL_ALIGN (exp) / BITS_PER_UNIT);
5902
5903	  return temp;
5904	}
5905
5906      /* If the mode of DECL_RTL does not match that of the decl, it
5907	 must be a promoted value.  We return a SUBREG of the wanted mode,
5908	 but mark it so that we know that it was already extended.  */
5909
5910      if (GET_CODE (DECL_RTL (exp)) == REG
5911	  && GET_MODE (DECL_RTL (exp)) != mode)
5912	{
5913	  /* Get the signedness used for this variable.  Ensure we get the
5914	     same mode we got when the variable was declared.  */
5915	  if (GET_MODE (DECL_RTL (exp))
5916	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5917	    abort ();
5918
5919	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5920	  SUBREG_PROMOTED_VAR_P (temp) = 1;
5921	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5922	  return temp;
5923	}
5924
5925      return DECL_RTL (exp);
5926
5927    case INTEGER_CST:
5928      return immed_double_const (TREE_INT_CST_LOW (exp),
5929				 TREE_INT_CST_HIGH (exp),
5930				 mode);
5931
5932    case CONST_DECL:
5933      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5934      			  EXPAND_MEMORY_USE_BAD);
5935
5936    case REAL_CST:
5937      /* If optimized, generate immediate CONST_DOUBLE
5938	 which will be turned into memory by reload if necessary.
5939
5940	 We used to force a register so that loop.c could see it.  But
5941	 this does not allow gen_* patterns to perform optimizations with
5942	 the constants.  It also produces two insns in cases like "x = 1.0;".
5943	 On most machines, floating-point constants are not permitted in
5944	 many insns, so we'd end up copying it to a register in any case.
5945
5946	 Now, we do the copying in expand_binop, if appropriate.  */
5947      return immed_real_const (exp);
5948
5949    case COMPLEX_CST:
5950    case STRING_CST:
5951      if (! TREE_CST_RTL (exp))
5952	output_constant_def (exp);
5953
5954      /* TREE_CST_RTL probably contains a constant address.
5955	 On RISC machines where a constant address isn't valid,
5956	 make some insns to get that address into a register.  */
5957      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5958	  && modifier != EXPAND_CONST_ADDRESS
5959	  && modifier != EXPAND_INITIALIZER
5960	  && modifier != EXPAND_SUM
5961	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5962	      || (flag_force_addr
5963		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5964	return change_address (TREE_CST_RTL (exp), VOIDmode,
5965			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5966      return TREE_CST_RTL (exp);
5967
5968    case EXPR_WITH_FILE_LOCATION:
5969      {
5970	rtx to_return;
5971	char *saved_input_filename = input_filename;
5972	int saved_lineno = lineno;
5973	input_filename = EXPR_WFL_FILENAME (exp);
5974	lineno = EXPR_WFL_LINENO (exp);
5975	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5976	  emit_line_note (input_filename, lineno);
5977	/* Possibly avoid switching back and force here */
5978	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5979	input_filename = saved_input_filename;
5980	lineno = saved_lineno;
5981	return to_return;
5982      }
5983
5984    case SAVE_EXPR:
5985      context = decl_function_context (exp);
5986
5987      /* If this SAVE_EXPR was at global context, assume we are an
5988	 initialization function and move it into our context.  */
5989      if (context == 0)
5990	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5991
5992      /* We treat inline_function_decl as an alias for the current function
5993	 because that is the inline function whose vars, types, etc.
5994	 are being merged into the current function.
5995	 See expand_inline_function.  */
5996      if (context == current_function_decl || context == inline_function_decl)
5997	context = 0;
5998
5999      /* If this is non-local, handle it.  */
6000      if (context)
6001	{
6002	  /* The following call just exists to abort if the context is
6003	     not of a containing function.  */
6004	  find_function_data (context);
6005
6006	  temp = SAVE_EXPR_RTL (exp);
6007	  if (temp && GET_CODE (temp) == REG)
6008	    {
6009	      put_var_into_stack (exp);
6010	      temp = SAVE_EXPR_RTL (exp);
6011	    }
6012	  if (temp == 0 || GET_CODE (temp) != MEM)
6013	    abort ();
6014	  return change_address (temp, mode,
6015				 fix_lexical_addr (XEXP (temp, 0), exp));
6016	}
6017      if (SAVE_EXPR_RTL (exp) == 0)
6018	{
6019	  if (mode == VOIDmode)
6020	    temp = const0_rtx;
6021	  else
6022	    temp = assign_temp (type, 3, 0, 0);
6023
6024	  SAVE_EXPR_RTL (exp) = temp;
6025	  if (!optimize && GET_CODE (temp) == REG)
6026	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6027						save_expr_regs);
6028
6029	  /* If the mode of TEMP does not match that of the expression, it
6030	     must be a promoted value.  We pass store_expr a SUBREG of the
6031	     wanted mode but mark it so that we know that it was already
6032	     extended.  Note that `unsignedp' was modified above in
6033	     this case.  */
6034
6035	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6036	    {
6037	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6038	      SUBREG_PROMOTED_VAR_P (temp) = 1;
6039	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6040	    }
6041
6042	  if (temp == const0_rtx)
6043	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6044			 EXPAND_MEMORY_USE_BAD);
6045	  else
6046	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6047
6048	  TREE_USED (exp) = 1;
6049	}
6050
6051      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6052	 must be a promoted value.  We return a SUBREG of the wanted mode,
6053	 but mark it so that we know that it was already extended.  */
6054
6055      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6056	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6057	{
6058	  /* Compute the signedness and make the proper SUBREG.  */
6059	  promote_mode (type, mode, &unsignedp, 0);
6060	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6061	  SUBREG_PROMOTED_VAR_P (temp) = 1;
6062	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6063	  return temp;
6064	}
6065
6066      return SAVE_EXPR_RTL (exp);
6067
6068    case UNSAVE_EXPR:
6069      {
6070	rtx temp;
6071	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6072	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6073	return temp;
6074      }
6075
6076    case PLACEHOLDER_EXPR:
6077      {
6078	tree placeholder_expr;
6079
6080	/* If there is an object on the head of the placeholder list,
6081	   see if some object in it of type TYPE or a pointer to it.  For
6082	   further information, see tree.def.  */
6083	for (placeholder_expr = placeholder_list;
6084	     placeholder_expr != 0;
6085	     placeholder_expr = TREE_CHAIN (placeholder_expr))
6086	  {
6087	    tree need_type = TYPE_MAIN_VARIANT (type);
6088	    tree object = 0;
6089	    tree old_list = placeholder_list;
6090	    tree elt;
6091
6092	    /* Find the outermost reference that is of the type we want.
6093	       If none, see if any object has a type that is a pointer to
6094	       the type we want.  */
6095	    for (elt = TREE_PURPOSE (placeholder_expr);
6096		 elt != 0 && object == 0;
6097		 elt
6098		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6099		     || TREE_CODE (elt) == COND_EXPR)
6100		    ? TREE_OPERAND (elt, 1)
6101		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6102		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6103		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6104		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6105		    ? TREE_OPERAND (elt, 0) : 0))
6106	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6107		object = elt;
6108
6109	    for (elt = TREE_PURPOSE (placeholder_expr);
6110		 elt != 0 && object == 0;
6111		 elt
6112		 = ((TREE_CODE (elt) == COMPOUND_EXPR
6113		     || TREE_CODE (elt) == COND_EXPR)
6114		    ? TREE_OPERAND (elt, 1)
6115		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6116		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6117		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6118		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6119		    ? TREE_OPERAND (elt, 0) : 0))
6120	      if (POINTER_TYPE_P (TREE_TYPE (elt))
6121		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6122		      == need_type))
6123		object = build1 (INDIRECT_REF, need_type, elt);
6124
6125	    if (object != 0)
6126	      {
6127		/* Expand this object skipping the list entries before
6128		   it was found in case it is also a PLACEHOLDER_EXPR.
6129		   In that case, we want to translate it using subsequent
6130		   entries.  */
6131		placeholder_list = TREE_CHAIN (placeholder_expr);
6132		temp = expand_expr (object, original_target, tmode,
6133				    ro_modifier);
6134		placeholder_list = old_list;
6135		return temp;
6136	      }
6137	  }
6138      }
6139
6140      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
6141      abort ();
6142
6143    case WITH_RECORD_EXPR:
6144      /* Put the object on the placeholder list, expand our first operand,
6145	 and pop the list.  */
6146      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6147				    placeholder_list);
6148      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6149			    tmode, ro_modifier);
6150      placeholder_list = TREE_CHAIN (placeholder_list);
6151      return target;
6152
6153    case GOTO_EXPR:
6154      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6155	expand_goto (TREE_OPERAND (exp, 0));
6156      else
6157	expand_computed_goto (TREE_OPERAND (exp, 0));
6158      return const0_rtx;
6159
6160    case EXIT_EXPR:
6161      expand_exit_loop_if_false (NULL_PTR,
6162				 invert_truthvalue (TREE_OPERAND (exp, 0)));
6163      return const0_rtx;
6164
6165    case LABELED_BLOCK_EXPR:
6166      if (LABELED_BLOCK_BODY (exp))
6167	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6168      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6169      return const0_rtx;
6170
6171    case EXIT_BLOCK_EXPR:
6172      if (EXIT_BLOCK_RETURN (exp))
6173	sorry ("returned value in block_exit_expr");
6174      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6175      return const0_rtx;
6176
6177    case LOOP_EXPR:
6178      push_temp_slots ();
6179      expand_start_loop (1);
6180      expand_expr_stmt (TREE_OPERAND (exp, 0));
6181      expand_end_loop ();
6182      pop_temp_slots ();
6183
6184      return const0_rtx;
6185
6186    case BIND_EXPR:
6187      {
6188	tree vars = TREE_OPERAND (exp, 0);
6189	int vars_need_expansion = 0;
6190
6191	/* Need to open a binding contour here because
6192	   if there are any cleanups they must be contained here.  */
6193	expand_start_bindings (0);
6194
6195	/* Mark the corresponding BLOCK for output in its proper place.  */
6196	if (TREE_OPERAND (exp, 2) != 0
6197	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
6198	  insert_block (TREE_OPERAND (exp, 2));
6199
6200	/* If VARS have not yet been expanded, expand them now.  */
6201	while (vars)
6202	  {
6203	    if (DECL_RTL (vars) == 0)
6204	      {
6205		vars_need_expansion = 1;
6206		expand_decl (vars);
6207	      }
6208	    expand_decl_init (vars);
6209	    vars = TREE_CHAIN (vars);
6210	  }
6211
6212	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6213
6214	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6215
6216	return temp;
6217      }
6218
6219    case RTL_EXPR:
6220      if (RTL_EXPR_SEQUENCE (exp))
6221	{
6222	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6223	    abort ();
6224	  emit_insns (RTL_EXPR_SEQUENCE (exp));
6225	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6226	}
6227      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6228      free_temps_for_rtl_expr (exp);
6229      return RTL_EXPR_RTL (exp);
6230
6231    case CONSTRUCTOR:
6232      /* If we don't need the result, just ensure we evaluate any
6233	 subexpressions.  */
6234      if (ignore)
6235	{
6236	  tree elt;
6237	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6238	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6239	    		 EXPAND_MEMORY_USE_BAD);
6240	  return const0_rtx;
6241	}
6242
6243      /* All elts simple constants => refer to a constant in memory.  But
6244	 if this is a non-BLKmode mode, let it store a field at a time
6245	 since that should make a CONST_INT or CONST_DOUBLE when we
6246	 fold.  Likewise, if we have a target we can use, it is best to
6247	 store directly into the target unless the type is large enough
6248	 that memcpy will be used.  If we are making an initializer and
6249	 all operands are constant, put it in memory as well.  */
6250      else if ((TREE_STATIC (exp)
6251		&& ((mode == BLKmode
6252		     && ! (target != 0 && safe_from_p (target, exp, 1)))
6253		    || TREE_ADDRESSABLE (exp)
6254		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6255			&& (!MOVE_BY_PIECES_P
6256                             (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6257			     TYPE_ALIGN (type) / BITS_PER_UNIT))
6258			&& ! mostly_zeros_p (exp))))
6259	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6260	{
6261	  rtx constructor = output_constant_def (exp);
6262	  if (modifier != EXPAND_CONST_ADDRESS
6263	      && modifier != EXPAND_INITIALIZER
6264	      && modifier != EXPAND_SUM
6265	      && (! memory_address_p (GET_MODE (constructor),
6266				      XEXP (constructor, 0))
6267		  || (flag_force_addr
6268		      && GET_CODE (XEXP (constructor, 0)) != REG)))
6269	    constructor = change_address (constructor, VOIDmode,
6270					  XEXP (constructor, 0));
6271	  return constructor;
6272	}
6273
6274      else
6275	{
6276	  /* Handle calls that pass values in multiple non-contiguous
6277	     locations.  The Irix 6 ABI has examples of this.  */
6278	  if (target == 0 || ! safe_from_p (target, exp, 1)
6279	      || GET_CODE (target) == PARALLEL)
6280	    {
6281	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6282		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6283	      else
6284		target = assign_temp (type, 0, 1, 1);
6285	    }
6286
6287	  if (TREE_READONLY (exp))
6288	    {
6289	      if (GET_CODE (target) == MEM)
6290		target = copy_rtx (target);
6291
6292	      RTX_UNCHANGING_P (target) = 1;
6293	    }
6294
6295	  store_constructor (exp, target, 0);
6296	  return target;
6297	}
6298
6299    case INDIRECT_REF:
6300      {
6301	tree exp1 = TREE_OPERAND (exp, 0);
6302	tree exp2;
6303	tree index;
6304 	tree string = string_constant (exp1, &index);
6305 	int i;
6306
6307	/* Try to optimize reads from const strings.  */
6308 	if (string
6309 	    && TREE_CODE (string) == STRING_CST
6310 	    && TREE_CODE (index) == INTEGER_CST
6311 	    && !TREE_INT_CST_HIGH (index)
6312 	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6313 	    && GET_MODE_CLASS (mode) == MODE_INT
6314 	    && GET_MODE_SIZE (mode) == 1
6315	    && modifier != EXPAND_MEMORY_USE_WO)
6316 	  return GEN_INT (TREE_STRING_POINTER (string)[i]);
6317
6318	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6319	op0 = memory_address (mode, op0);
6320
6321	if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6322	  {
6323	    enum memory_use_mode memory_usage;
6324	    memory_usage = get_memory_usage_from_modifier (modifier);
6325
6326            if (memory_usage != MEMORY_USE_DONT)
6327	      {
6328		in_check_memory_usage = 1;
6329		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6330				   op0, Pmode,
6331				   GEN_INT (int_size_in_bytes (type)),
6332				   TYPE_MODE (sizetype),
6333				   GEN_INT (memory_usage),
6334				   TYPE_MODE (integer_type_node));
6335		in_check_memory_usage = 0;
6336	      }
6337	  }
6338
6339	temp = gen_rtx_MEM (mode, op0);
6340
6341	if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6342	    || (TREE_CODE (exp1) == ADDR_EXPR
6343		&& (exp2 = TREE_OPERAND (exp1, 0))
6344		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6345	  MEM_SET_IN_STRUCT_P (temp, 1);
6346
6347	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6348	MEM_ALIAS_SET (temp) = get_alias_set (exp);
6349
6350	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6351	   here, because, in C and C++, the fact that a location is accessed
6352	   through a pointer to const does not mean that the value there can
6353	   never change.  Languages where it can never change should
6354	   also set TREE_STATIC.  */
6355	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6356	return temp;
6357      }
6358
6359    case ARRAY_REF:
6360      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6361	abort ();
6362
6363      {
6364	tree array = TREE_OPERAND (exp, 0);
6365	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6366	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6367	tree index = TREE_OPERAND (exp, 1);
6368	tree index_type = TREE_TYPE (index);
6369	HOST_WIDE_INT i;
6370
6371	/* Optimize the special-case of a zero lower bound.
6372
6373	   We convert the low_bound to sizetype to avoid some problems
6374	   with constant folding.  (E.g. suppose the lower bound is 1,
6375	   and its mode is QI.  Without the conversion,  (ARRAY
6376	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6377	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
6378
6379	   But sizetype isn't quite right either (especially if
6380	   the lowbound is negative).  FIXME */
6381
6382	if (! integer_zerop (low_bound))
6383	  index = fold (build (MINUS_EXPR, index_type, index,
6384			       convert (sizetype, low_bound)));
6385
6386	/* Fold an expression like: "foo"[2].
6387	   This is not done in fold so it won't happen inside &.
6388	   Don't fold if this is for wide characters since it's too
6389	   difficult to do correctly and this is a very rare case.  */
6390
6391	if (TREE_CODE (array) == STRING_CST
6392	    && TREE_CODE (index) == INTEGER_CST
6393	    && !TREE_INT_CST_HIGH (index)
6394	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6395	    && GET_MODE_CLASS (mode) == MODE_INT
6396	    && GET_MODE_SIZE (mode) == 1)
6397	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
6398
6399	/* If this is a constant index into a constant array,
6400	   just get the value from the array.  Handle both the cases when
6401	   we have an explicit constructor and when our operand is a variable
6402	   that was declared const.  */
6403
6404	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6405	  {
6406	    if (TREE_CODE (index) == INTEGER_CST
6407		&& TREE_INT_CST_HIGH (index) == 0)
6408	      {
6409		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6410
6411		i = TREE_INT_CST_LOW (index);
6412		while (elem && i--)
6413		  elem = TREE_CHAIN (elem);
6414		if (elem)
6415		  return expand_expr (fold (TREE_VALUE (elem)), target,
6416				      tmode, ro_modifier);
6417	      }
6418	  }
6419
6420	else if (optimize >= 1
6421		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6422		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6423		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6424	  {
6425	    if (TREE_CODE (index) == INTEGER_CST)
6426	      {
6427		tree init = DECL_INITIAL (array);
6428
6429		i = TREE_INT_CST_LOW (index);
6430		if (TREE_CODE (init) == CONSTRUCTOR)
6431		  {
6432		    tree elem = CONSTRUCTOR_ELTS (init);
6433
6434		    while (elem
6435			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6436		      elem = TREE_CHAIN (elem);
6437		    if (elem)
6438		      return expand_expr (fold (TREE_VALUE (elem)), target,
6439					  tmode, ro_modifier);
6440		  }
6441		else if (TREE_CODE (init) == STRING_CST
6442			 && TREE_INT_CST_HIGH (index) == 0
6443			 && (TREE_INT_CST_LOW (index)
6444			     < TREE_STRING_LENGTH (init)))
6445		  return (GEN_INT
6446			  (TREE_STRING_POINTER
6447			   (init)[TREE_INT_CST_LOW (index)]));
6448	      }
6449	  }
6450      }
6451
6452      /* ... fall through ... */
6453
6454    case COMPONENT_REF:
6455    case BIT_FIELD_REF:
6456      /* If the operand is a CONSTRUCTOR, we can just extract the
6457	 appropriate field if it is present.  Don't do this if we have
6458	 already written the data since we want to refer to that copy
6459	 and varasm.c assumes that's what we'll do.  */
6460      if (code != ARRAY_REF
6461	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6462	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6463	{
6464	  tree elt;
6465
6466	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6467	       elt = TREE_CHAIN (elt))
6468	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6469		/* We can normally use the value of the field in the
6470		   CONSTRUCTOR.  However, if this is a bitfield in
6471		   an integral mode that we can fit in a HOST_WIDE_INT,
6472		   we must mask only the number of bits in the bitfield,
6473		   since this is done implicitly by the constructor.  If
6474		   the bitfield does not meet either of those conditions,
6475		   we can't do this optimization.  */
6476		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6477		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6478			 == MODE_INT)
6479			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6480			    <= HOST_BITS_PER_WIDE_INT))))
6481	      {
6482		op0 =  expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6483		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6484		  {
6485		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6486
6487		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6488		      {
6489			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6490			op0 = expand_and (op0, op1, target);
6491		      }
6492		    else
6493		      {
6494			enum machine_mode imode
6495			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6496			tree count
6497			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6498					 0);
6499
6500			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6501					    target, 0);
6502			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6503					    target, 0);
6504		      }
6505		  }
6506
6507		return op0;
6508	      }
6509	}
6510
6511      {
6512	enum machine_mode mode1;
6513	int bitsize;
6514	int bitpos;
6515	tree offset;
6516	int volatilep = 0;
6517	int alignment;
6518	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6519					&mode1, &unsignedp, &volatilep,
6520					&alignment);
6521
6522	/* If we got back the original object, something is wrong.  Perhaps
6523	   we are evaluating an expression too early.  In any event, don't
6524	   infinitely recurse.  */
6525	if (tem == exp)
6526	  abort ();
6527
6528	/* If TEM's type is a union of variable size, pass TARGET to the inner
6529	   computation, since it will need a temporary and TARGET is known
6530	   to have to do.  This occurs in unchecked conversion in Ada.  */
6531
6532	op0 = expand_expr (tem,
6533			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6534			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6535				!= INTEGER_CST)
6536			    ? target : NULL_RTX),
6537			   VOIDmode,
6538			   modifier == EXPAND_INITIALIZER
6539			   ? modifier : EXPAND_NORMAL);
6540
6541	/* If this is a constant, put it into a register if it is a
6542	   legitimate constant and memory if it isn't.  */
6543	if (CONSTANT_P (op0))
6544	  {
6545	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6546	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6547	      op0 = force_reg (mode, op0);
6548	    else
6549	      op0 = validize_mem (force_const_mem (mode, op0));
6550	  }
6551
6552	if (offset != 0)
6553	  {
6554	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6555
6556	    if (GET_CODE (op0) != MEM)
6557	      abort ();
6558
6559	    if (GET_MODE (offset_rtx) != ptr_mode)
6560	      {
6561#ifdef POINTERS_EXTEND_UNSIGNED
6562		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6563#else
6564		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6565#endif
6566	      }
6567
6568	    /* A constant address in TO_RTX can have VOIDmode, we must not try
6569	       to call force_reg for that case.  Avoid that case.  */
6570	    if (GET_CODE (op0) == MEM
6571		&& GET_MODE (op0) == BLKmode
6572		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
6573		&& bitsize
6574		&& (bitpos % bitsize) == 0
6575		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6576		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6577	      {
6578		rtx temp = change_address (op0, mode1,
6579					   plus_constant (XEXP (op0, 0),
6580							  (bitpos /
6581							   BITS_PER_UNIT)));
6582		if (GET_CODE (XEXP (temp, 0)) == REG)
6583		  op0 = temp;
6584		else
6585		  op0 = change_address (op0, mode1,
6586					force_reg (GET_MODE (XEXP (temp, 0)),
6587						   XEXP (temp, 0)));
6588		bitpos = 0;
6589	      }
6590
6591
6592	    op0 = change_address (op0, VOIDmode,
6593				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6594						force_reg (ptr_mode, offset_rtx)));
6595	  }
6596
6597	/* Don't forget about volatility even if this is a bitfield.  */
6598	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6599	  {
6600	    op0 = copy_rtx (op0);
6601	    MEM_VOLATILE_P (op0) = 1;
6602	  }
6603
6604	/* Check the access.  */
6605	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6606          {
6607	    enum memory_use_mode memory_usage;
6608	    memory_usage = get_memory_usage_from_modifier (modifier);
6609
6610	    if (memory_usage != MEMORY_USE_DONT)
6611	      {
6612		rtx to;
6613		int size;
6614
6615		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6616		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6617
6618        	/* Check the access right of the pointer.  */
6619		if (size > BITS_PER_UNIT)
6620		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6621				     to, Pmode,
6622				     GEN_INT (size / BITS_PER_UNIT),
6623				     TYPE_MODE (sizetype),
6624				     GEN_INT (memory_usage),
6625				     TYPE_MODE (integer_type_node));
6626	      }
6627	  }
6628
6629	/* In cases where an aligned union has an unaligned object
6630	   as a field, we might be extracting a BLKmode value from
6631	   an integer-mode (e.g., SImode) object.  Handle this case
6632	   by doing the extract into an object as wide as the field
6633	   (which we know to be the width of a basic mode), then
6634	   storing into memory, and changing the mode to BLKmode.
6635	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
6636	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
6637	if (mode1 == VOIDmode
6638	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6639	    || (modifier != EXPAND_CONST_ADDRESS
6640		&& modifier != EXPAND_INITIALIZER
6641		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
6642		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6643		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6644		    /* If the field isn't aligned enough to fetch as a memref,
6645		       fetch it as a bit field.  */
6646		    || (SLOW_UNALIGNED_ACCESS
6647			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6648			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6649	  {
6650	    enum machine_mode ext_mode = mode;
6651
6652	    if (ext_mode == BLKmode)
6653	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6654
6655	    if (ext_mode == BLKmode)
6656	      {
6657		/* In this case, BITPOS must start at a byte boundary and
6658		   TARGET, if specified, must be a MEM.  */
6659		if (GET_CODE (op0) != MEM
6660		    || (target != 0 && GET_CODE (target) != MEM)
6661		    || bitpos % BITS_PER_UNIT != 0)
6662		  abort ();
6663
6664		op0 = change_address (op0, VOIDmode,
6665				      plus_constant (XEXP (op0, 0),
6666						     bitpos / BITS_PER_UNIT));
6667		if (target == 0)
6668		  target = assign_temp (type, 0, 1, 1);
6669
6670		emit_block_move (target, op0,
6671				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6672					  / BITS_PER_UNIT),
6673				 1);
6674
6675		return target;
6676	      }
6677
6678	    op0 = validize_mem (op0);
6679
6680	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6681	      mark_reg_pointer (XEXP (op0, 0), alignment);
6682
6683	    op0 = extract_bit_field (op0, bitsize, bitpos,
6684				     unsignedp, target, ext_mode, ext_mode,
6685				     alignment,
6686				     int_size_in_bytes (TREE_TYPE (tem)));
6687
6688	    /* If the result is a record type and BITSIZE is narrower than
6689	       the mode of OP0, an integral mode, and this is a big endian
6690	       machine, we must put the field into the high-order bits.  */
6691	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6692		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6693		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6694	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6695				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6696					    - bitsize),
6697				  op0, 1);
6698
6699	    if (mode == BLKmode)
6700	      {
6701		rtx new = assign_stack_temp (ext_mode,
6702					     bitsize / BITS_PER_UNIT, 0);
6703
6704		emit_move_insn (new, op0);
6705		op0 = copy_rtx (new);
6706		PUT_MODE (op0, BLKmode);
6707		MEM_SET_IN_STRUCT_P (op0, 1);
6708	      }
6709
6710	    return op0;
6711	  }
6712
6713	/* If the result is BLKmode, use that to access the object
6714	   now as well.  */
6715	if (mode == BLKmode)
6716	  mode1 = BLKmode;
6717
6718	/* Get a reference to just this component.  */
6719	if (modifier == EXPAND_CONST_ADDRESS
6720	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6721	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6722						   (bitpos / BITS_PER_UNIT)));
6723	else
6724	  op0 = change_address (op0, mode1,
6725				plus_constant (XEXP (op0, 0),
6726					       (bitpos / BITS_PER_UNIT)));
6727
6728	if (GET_CODE (op0) == MEM)
6729	  MEM_ALIAS_SET (op0) = get_alias_set (exp);
6730
6731	if (GET_CODE (XEXP (op0, 0)) == REG)
6732	  mark_reg_pointer (XEXP (op0, 0), alignment);
6733
6734	MEM_SET_IN_STRUCT_P (op0, 1);
6735	MEM_VOLATILE_P (op0) |= volatilep;
6736	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6737	    || modifier == EXPAND_CONST_ADDRESS
6738	    || modifier == EXPAND_INITIALIZER)
6739	  return op0;
6740	else if (target == 0)
6741	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6742
6743	convert_move (target, op0, unsignedp);
6744	return target;
6745      }
6746
6747      /* Intended for a reference to a buffer of a file-object in Pascal.
6748	 But it's not certain that a special tree code will really be
6749	 necessary for these.  INDIRECT_REF might work for them.  */
6750    case BUFFER_REF:
6751      abort ();
6752
6753    case IN_EXPR:
6754      {
6755	/* Pascal set IN expression.
6756
6757	   Algorithm:
6758	       rlo       = set_low - (set_low%bits_per_word);
6759	       the_word  = set [ (index - rlo)/bits_per_word ];
6760	       bit_index = index % bits_per_word;
6761	       bitmask   = 1 << bit_index;
6762	       return !!(the_word & bitmask);  */
6763
6764	tree set = TREE_OPERAND (exp, 0);
6765	tree index = TREE_OPERAND (exp, 1);
6766	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6767	tree set_type = TREE_TYPE (set);
6768	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6769	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6770	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6771	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6772	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6773	rtx setval = expand_expr (set, 0, VOIDmode, 0);
6774	rtx setaddr = XEXP (setval, 0);
6775	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6776	rtx rlow;
6777	rtx diff, quo, rem, addr, bit, result;
6778
6779	preexpand_calls (exp);
6780
6781	/* If domain is empty, answer is no.  Likewise if index is constant
6782	   and out of bounds.  */
6783	if (((TREE_CODE (set_high_bound) == INTEGER_CST
6784	     && TREE_CODE (set_low_bound) == INTEGER_CST
6785	     && tree_int_cst_lt (set_high_bound, set_low_bound))
6786	     || (TREE_CODE (index) == INTEGER_CST
6787		 && TREE_CODE (set_low_bound) == INTEGER_CST
6788		 && tree_int_cst_lt (index, set_low_bound))
6789	     || (TREE_CODE (set_high_bound) == INTEGER_CST
6790		 && TREE_CODE (index) == INTEGER_CST
6791		 && tree_int_cst_lt (set_high_bound, index))))
6792	  return const0_rtx;
6793
6794	if (target == 0)
6795	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6796
6797	/* If we get here, we have to generate the code for both cases
6798	   (in range and out of range).  */
6799
6800	op0 = gen_label_rtx ();
6801	op1 = gen_label_rtx ();
6802
6803	if (! (GET_CODE (index_val) == CONST_INT
6804	       && GET_CODE (lo_r) == CONST_INT))
6805	  {
6806	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6807				     GET_MODE (index_val), iunsignedp, 0, op1);
6808	  }
6809
6810	if (! (GET_CODE (index_val) == CONST_INT
6811	       && GET_CODE (hi_r) == CONST_INT))
6812	  {
6813	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6814				     GET_MODE (index_val), iunsignedp, 0, op1);
6815	  }
6816
6817	/* Calculate the element number of bit zero in the first word
6818	   of the set.  */
6819	if (GET_CODE (lo_r) == CONST_INT)
6820	  rlow = GEN_INT (INTVAL (lo_r)
6821			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6822	else
6823	  rlow = expand_binop (index_mode, and_optab, lo_r,
6824			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6825			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6826
6827	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6828			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6829
6830	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6831			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6832	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6833			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6834
6835	addr = memory_address (byte_mode,
6836			       expand_binop (index_mode, add_optab, diff,
6837					     setaddr, NULL_RTX, iunsignedp,
6838					     OPTAB_LIB_WIDEN));
6839
6840	/* Extract the bit we want to examine */
6841	bit = expand_shift (RSHIFT_EXPR, byte_mode,
6842			    gen_rtx_MEM (byte_mode, addr),
6843			    make_tree (TREE_TYPE (index), rem),
6844			    NULL_RTX, 1);
6845	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6846			       GET_MODE (target) == byte_mode ? target : 0,
6847			       1, OPTAB_LIB_WIDEN);
6848
6849	if (result != target)
6850	  convert_move (target, result, 1);
6851
6852	/* Output the code to handle the out-of-range case.  */
6853	emit_jump (op0);
6854	emit_label (op1);
6855	emit_move_insn (target, const0_rtx);
6856	emit_label (op0);
6857	return target;
6858      }
6859
6860    case WITH_CLEANUP_EXPR:
6861      if (RTL_EXPR_RTL (exp) == 0)
6862	{
6863	  RTL_EXPR_RTL (exp)
6864	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6865	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6866
6867	  /* That's it for this cleanup.  */
6868	  TREE_OPERAND (exp, 2) = 0;
6869	}
6870      return RTL_EXPR_RTL (exp);
6871
6872    case CLEANUP_POINT_EXPR:
6873      {
6874	/* Start a new binding layer that will keep track of all cleanup
6875	   actions to be performed.  */
6876	expand_start_bindings (0);
6877
6878	target_temp_slot_level = temp_slot_level;
6879
6880	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6881	/* If we're going to use this value, load it up now.  */
6882	if (! ignore)
6883	  op0 = force_not_mem (op0);
6884	preserve_temp_slots (op0);
6885	expand_end_bindings (NULL_TREE, 0, 0);
6886      }
6887      return op0;
6888
6889    case CALL_EXPR:
6890      /* Check for a built-in function.  */
6891      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6892	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6893	      == FUNCTION_DECL)
6894	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6895	return expand_builtin (exp, target, subtarget, tmode, ignore);
6896
6897      /* If this call was expanded already by preexpand_calls,
6898	 just return the result we got.  */
6899      if (CALL_EXPR_RTL (exp) != 0)
6900	return CALL_EXPR_RTL (exp);
6901
6902      return expand_call (exp, target, ignore);
6903
6904    case NON_LVALUE_EXPR:
6905    case NOP_EXPR:
6906    case CONVERT_EXPR:
6907    case REFERENCE_EXPR:
6908      if (TREE_CODE (type) == UNION_TYPE)
6909	{
6910	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6911	  if (target == 0)
6912	    {
6913	      if (mode != BLKmode)
6914		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6915	      else
6916		target = assign_temp (type, 0, 1, 1);
6917	    }
6918
6919	  if (GET_CODE (target) == MEM)
6920	    /* Store data into beginning of memory target.  */
6921	    store_expr (TREE_OPERAND (exp, 0),
6922			change_address (target, TYPE_MODE (valtype), 0), 0);
6923
6924	  else if (GET_CODE (target) == REG)
6925	    /* Store this field into a union of the proper type.  */
6926	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6927			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6928			 VOIDmode, 0, 1,
6929			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6930			 0);
6931	  else
6932	    abort ();
6933
6934	  /* Return the entire union.  */
6935	  return target;
6936	}
6937
6938      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6939	{
6940	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6941			     ro_modifier);
6942
6943	  /* If the signedness of the conversion differs and OP0 is
6944	     a promoted SUBREG, clear that indication since we now
6945	     have to do the proper extension.  */
6946	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6947	      && GET_CODE (op0) == SUBREG)
6948	    SUBREG_PROMOTED_VAR_P (op0) = 0;
6949
6950	  return op0;
6951	}
6952
6953      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6954      if (GET_MODE (op0) == mode)
6955	return op0;
6956
6957      /* If OP0 is a constant, just convert it into the proper mode.  */
6958      if (CONSTANT_P (op0))
6959	return
6960	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6961			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6962
6963      if (modifier == EXPAND_INITIALIZER)
6964	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6965
6966      if (target == 0)
6967	return
6968	  convert_to_mode (mode, op0,
6969			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6970      else
6971	convert_move (target, op0,
6972		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6973      return target;
6974
6975    case PLUS_EXPR:
6976      /* We come here from MINUS_EXPR when the second operand is a
6977         constant.  */
6978    plus_expr:
6979      this_optab = add_optab;
6980
6981      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6982	 something else, make sure we add the register to the constant and
6983	 then to the other thing.  This case can occur during strength
6984	 reduction and doing it this way will produce better code if the
6985	 frame pointer or argument pointer is eliminated.
6986
6987	 fold-const.c will ensure that the constant is always in the inner
6988	 PLUS_EXPR, so the only case we need to do anything about is if
6989	 sp, ap, or fp is our second argument, in which case we must swap
6990	 the innermost first argument and our second argument.  */
6991
6992      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6993	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6994	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6995	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6996	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6997	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6998	{
6999	  tree t = TREE_OPERAND (exp, 1);
7000
7001	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7002	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7003	}
7004
7005      /* If the result is to be ptr_mode and we are adding an integer to
7006	 something, we might be forming a constant.  So try to use
7007	 plus_constant.  If it produces a sum and we can't accept it,
7008	 use force_operand.  This allows P = &ARR[const] to generate
7009	 efficient code on machines where a SYMBOL_REF is not a valid
7010	 address.
7011
7012	 If this is an EXPAND_SUM call, always return the sum.  */
7013      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7014	  || mode == ptr_mode)
7015	{
7016	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7017	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7018	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7019	    {
7020	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7021				 EXPAND_SUM);
7022	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
7023	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7024		op1 = force_operand (op1, target);
7025	      return op1;
7026	    }
7027
7028	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7029		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7030		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7031	    {
7032	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7033				 EXPAND_SUM);
7034	      if (! CONSTANT_P (op0))
7035		{
7036		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7037				     VOIDmode, modifier);
7038		  /* Don't go to both_summands if modifier
7039		     says it's not right to return a PLUS.  */
7040		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7041		    goto binop2;
7042		  goto both_summands;
7043		}
7044	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
7045	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7046		op0 = force_operand (op0, target);
7047	      return op0;
7048	    }
7049	}
7050
7051      /* No sense saving up arithmetic to be done
7052	 if it's all in the wrong mode to form part of an address.
7053	 And force_operand won't know whether to sign-extend or
7054	 zero-extend.  */
7055      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7056	  || mode != ptr_mode)
7057	goto binop;
7058
7059      preexpand_calls (exp);
7060      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7061	subtarget = 0;
7062
7063      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7064      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7065
7066    both_summands:
7067      /* Make sure any term that's a sum with a constant comes last.  */
7068      if (GET_CODE (op0) == PLUS
7069	  && CONSTANT_P (XEXP (op0, 1)))
7070	{
7071	  temp = op0;
7072	  op0 = op1;
7073	  op1 = temp;
7074	}
7075      /* If adding to a sum including a constant,
7076	 associate it to put the constant outside.  */
7077      if (GET_CODE (op1) == PLUS
7078	  && CONSTANT_P (XEXP (op1, 1)))
7079	{
7080	  rtx constant_term = const0_rtx;
7081
7082	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7083	  if (temp != 0)
7084	    op0 = temp;
7085	  /* Ensure that MULT comes first if there is one.  */
7086	  else if (GET_CODE (op0) == MULT)
7087	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7088	  else
7089	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7090
7091	  /* Let's also eliminate constants from op0 if possible.  */
7092	  op0 = eliminate_constant_term (op0, &constant_term);
7093
7094	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7095	     their sum should be a constant.  Form it into OP1, since the
7096	     result we want will then be OP0 + OP1.  */
7097
7098	  temp = simplify_binary_operation (PLUS, mode, constant_term,
7099					    XEXP (op1, 1));
7100	  if (temp != 0)
7101	    op1 = temp;
7102	  else
7103	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7104	}
7105
7106      /* Put a constant term last and put a multiplication first.  */
7107      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7108	temp = op1, op1 = op0, op0 = temp;
7109
7110      temp = simplify_binary_operation (PLUS, mode, op0, op1);
7111      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7112
7113    case MINUS_EXPR:
7114      /* For initializers, we are allowed to return a MINUS of two
7115	 symbolic constants.  Here we handle all cases when both operands
7116	 are constant.  */
7117      /* Handle difference of two symbolic constants,
7118	 for the sake of an initializer.  */
7119      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7120	  && really_constant_p (TREE_OPERAND (exp, 0))
7121	  && really_constant_p (TREE_OPERAND (exp, 1)))
7122	{
7123	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7124				 VOIDmode, ro_modifier);
7125	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7126				 VOIDmode, ro_modifier);
7127
7128	  /* If the last operand is a CONST_INT, use plus_constant of
7129	     the negated constant.  Else make the MINUS.  */
7130	  if (GET_CODE (op1) == CONST_INT)
7131	    return plus_constant (op0, - INTVAL (op1));
7132	  else
7133	    return gen_rtx_MINUS (mode, op0, op1);
7134	}
7135      /* Convert A - const to A + (-const).  */
7136      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7137	{
7138	  tree negated = fold (build1 (NEGATE_EXPR, type,
7139				       TREE_OPERAND (exp, 1)));
7140
7141	  /* Deal with the case where we can't negate the constant
7142	     in TYPE.  */
7143	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7144	    {
7145	      tree newtype = signed_type (type);
7146	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7147	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7148	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7149
7150	      if (! TREE_OVERFLOW (newneg))
7151		return expand_expr (convert (type,
7152					     build (PLUS_EXPR, newtype,
7153						    newop0, newneg)),
7154				    target, tmode, ro_modifier);
7155	    }
7156	  else
7157	    {
7158	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7159	      goto plus_expr;
7160	    }
7161	}
7162      this_optab = sub_optab;
7163      goto binop;
7164
7165    case MULT_EXPR:
7166      preexpand_calls (exp);
7167      /* If first operand is constant, swap them.
7168	 Thus the following special case checks need only
7169	 check the second operand.  */
7170      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7171	{
7172	  register tree t1 = TREE_OPERAND (exp, 0);
7173	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7174	  TREE_OPERAND (exp, 1) = t1;
7175	}
7176
7177      /* Attempt to return something suitable for generating an
7178	 indexed address, for machines that support that.  */
7179
7180      if (modifier == EXPAND_SUM && mode == ptr_mode
7181	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7182	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7183	{
7184	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7185			     EXPAND_SUM);
7186
7187	  /* Apply distributive law if OP0 is x+c.  */
7188	  if (GET_CODE (op0) == PLUS
7189	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7190	    return gen_rtx_PLUS (mode,
7191				 gen_rtx_MULT (mode, XEXP (op0, 0),
7192					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7193			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7194				     * INTVAL (XEXP (op0, 1))));
7195
7196	  if (GET_CODE (op0) != REG)
7197	    op0 = force_operand (op0, NULL_RTX);
7198	  if (GET_CODE (op0) != REG)
7199	    op0 = copy_to_mode_reg (mode, op0);
7200
7201	  return gen_rtx_MULT (mode, op0,
7202			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7203	}
7204
7205      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7206	subtarget = 0;
7207
7208      /* Check for multiplying things that have been extended
7209	 from a narrower type.  If this machine supports multiplying
7210	 in that narrower type with a result in the desired type,
7211	 do it that way, and avoid the explicit type-conversion.  */
7212      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7213	  && TREE_CODE (type) == INTEGER_TYPE
7214	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7215	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7216	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7217	       && int_fits_type_p (TREE_OPERAND (exp, 1),
7218				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7219	       /* Don't use a widening multiply if a shift will do.  */
7220	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7221		    > HOST_BITS_PER_WIDE_INT)
7222		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7223	      ||
7224	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7225	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7226		   ==
7227		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7228	       /* If both operands are extended, they must either both
7229		  be zero-extended or both be sign-extended.  */
7230	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7231		   ==
7232		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7233	{
7234	  enum machine_mode innermode
7235	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7236	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7237			? smul_widen_optab : umul_widen_optab);
7238	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7239			? umul_widen_optab : smul_widen_optab);
7240	  if (mode == GET_MODE_WIDER_MODE (innermode))
7241	    {
7242	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7243		{
7244		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7245				     NULL_RTX, VOIDmode, 0);
7246		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7247		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7248				       VOIDmode, 0);
7249		  else
7250		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7251				       NULL_RTX, VOIDmode, 0);
7252		  goto binop2;
7253		}
7254	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7255		       && innermode == word_mode)
7256		{
7257		  rtx htem;
7258		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7259				     NULL_RTX, VOIDmode, 0);
7260		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7261		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7262				       VOIDmode, 0);
7263		  else
7264		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7265				       NULL_RTX, VOIDmode, 0);
7266		  temp = expand_binop (mode, other_optab, op0, op1, target,
7267				       unsignedp, OPTAB_LIB_WIDEN);
7268		  htem = expand_mult_highpart_adjust (innermode,
7269						      gen_highpart (innermode, temp),
7270						      op0, op1,
7271						      gen_highpart (innermode, temp),
7272						      unsignedp);
7273		  emit_move_insn (gen_highpart (innermode, temp), htem);
7274		  return temp;
7275		}
7276	    }
7277	}
7278      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7279      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7280      return expand_mult (mode, op0, op1, target, unsignedp);
7281
7282    case TRUNC_DIV_EXPR:
7283    case FLOOR_DIV_EXPR:
7284    case CEIL_DIV_EXPR:
7285    case ROUND_DIV_EXPR:
7286    case EXACT_DIV_EXPR:
7287      preexpand_calls (exp);
7288      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7289	subtarget = 0;
7290      /* Possible optimization: compute the dividend with EXPAND_SUM
7291	 then if the divisor is constant can optimize the case
7292	 where some terms of the dividend have coeffs divisible by it.  */
7293      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7294      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7295      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7296
7297    case RDIV_EXPR:
7298      this_optab = flodiv_optab;
7299      goto binop;
7300
7301    case TRUNC_MOD_EXPR:
7302    case FLOOR_MOD_EXPR:
7303    case CEIL_MOD_EXPR:
7304    case ROUND_MOD_EXPR:
7305      preexpand_calls (exp);
7306      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7307	subtarget = 0;
7308      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7309      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7310      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7311
7312    case FIX_ROUND_EXPR:
7313    case FIX_FLOOR_EXPR:
7314    case FIX_CEIL_EXPR:
7315      abort ();			/* Not used for C.  */
7316
7317    case FIX_TRUNC_EXPR:
7318      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7319      if (target == 0)
7320	target = gen_reg_rtx (mode);
7321      expand_fix (target, op0, unsignedp);
7322      return target;
7323
7324    case FLOAT_EXPR:
7325      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7326      if (target == 0)
7327	target = gen_reg_rtx (mode);
7328      /* expand_float can't figure out what to do if FROM has VOIDmode.
7329	 So give it the correct mode.  With -O, cse will optimize this.  */
7330      if (GET_MODE (op0) == VOIDmode)
7331	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7332				op0);
7333      expand_float (target, op0,
7334		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7335      return target;
7336
7337    case NEGATE_EXPR:
7338      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7339      temp = expand_unop (mode, neg_optab, op0, target, 0);
7340      if (temp == 0)
7341	abort ();
7342      return temp;
7343
7344    case ABS_EXPR:
7345      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7346
7347      /* Handle complex values specially.  */
7348      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7349	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7350	return expand_complex_abs (mode, op0, target, unsignedp);
7351
7352      /* Unsigned abs is simply the operand.  Testing here means we don't
7353	 risk generating incorrect code below.  */
7354      if (TREE_UNSIGNED (type))
7355	return op0;
7356
7357      return expand_abs (mode, op0, target,
7358			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7359
7360    case MAX_EXPR:
7361    case MIN_EXPR:
7362      target = original_target;
7363      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7364	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7365	  || GET_MODE (target) != mode
7366	  || (GET_CODE (target) == REG
7367	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
7368	target = gen_reg_rtx (mode);
7369      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7370      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7371
7372      /* First try to do it with a special MIN or MAX instruction.
7373	 If that does not win, use a conditional jump to select the proper
7374	 value.  */
7375      this_optab = (TREE_UNSIGNED (type)
7376		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
7377		    : (code == MIN_EXPR ? smin_optab : smax_optab));
7378
7379      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7380			   OPTAB_WIDEN);
7381      if (temp != 0)
7382	return temp;
7383
7384      /* At this point, a MEM target is no longer useful; we will get better
7385	 code without it.  */
7386
7387      if (GET_CODE (target) == MEM)
7388	target = gen_reg_rtx (mode);
7389
7390      if (target != op0)
7391	emit_move_insn (target, op0);
7392
7393      op0 = gen_label_rtx ();
7394
7395      /* If this mode is an integer too wide to compare properly,
7396	 compare word by word.  Rely on cse to optimize constant cases.  */
7397      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7398	{
7399	  if (code == MAX_EXPR)
7400	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7401					  target, op1, NULL_RTX, op0);
7402	  else
7403	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7404					  op1, target, NULL_RTX, op0);
7405	  emit_move_insn (target, op1);
7406	}
7407      else
7408	{
7409	  if (code == MAX_EXPR)
7410	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7411		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7412		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7413	  else
7414	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7415		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7416		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7417	  if (temp == const0_rtx)
7418	    emit_move_insn (target, op1);
7419	  else if (temp != const_true_rtx)
7420	    {
7421	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7422		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7423	      else
7424		abort ();
7425	      emit_move_insn (target, op1);
7426	    }
7427	}
7428      emit_label (op0);
7429      return target;
7430
7431    case BIT_NOT_EXPR:
7432      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7433      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7434      if (temp == 0)
7435	abort ();
7436      return temp;
7437
7438    case FFS_EXPR:
7439      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7440      temp = expand_unop (mode, ffs_optab, op0, target, 1);
7441      if (temp == 0)
7442	abort ();
7443      return temp;
7444
7445      /* ??? Can optimize bitwise operations with one arg constant.
7446	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7447	 and (a bitwise1 b) bitwise2 b (etc)
7448	 but that is probably not worth while.  */
7449
7450      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
7451	 boolean values when we want in all cases to compute both of them.  In
7452	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7453	 as actual zero-or-1 values and then bitwise anding.  In cases where
7454	 there cannot be any side effects, better code would be made by
7455	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7456	 how to recognize those cases.  */
7457
7458    case TRUTH_AND_EXPR:
7459    case BIT_AND_EXPR:
7460      this_optab = and_optab;
7461      goto binop;
7462
7463    case TRUTH_OR_EXPR:
7464    case BIT_IOR_EXPR:
7465      this_optab = ior_optab;
7466      goto binop;
7467
7468    case TRUTH_XOR_EXPR:
7469    case BIT_XOR_EXPR:
7470      this_optab = xor_optab;
7471      goto binop;
7472
7473    case LSHIFT_EXPR:
7474    case RSHIFT_EXPR:
7475    case LROTATE_EXPR:
7476    case RROTATE_EXPR:
7477      preexpand_calls (exp);
7478      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7479	subtarget = 0;
7480      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7481      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7482			   unsignedp);
7483
7484      /* Could determine the answer when only additive constants differ.  Also,
7485	 the addition of one can be handled by changing the condition.  */
7486    case LT_EXPR:
7487    case LE_EXPR:
7488    case GT_EXPR:
7489    case GE_EXPR:
7490    case EQ_EXPR:
7491    case NE_EXPR:
7492      preexpand_calls (exp);
7493      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7494      if (temp != 0)
7495	return temp;
7496
7497      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
7498      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7499	  && original_target
7500	  && GET_CODE (original_target) == REG
7501	  && (GET_MODE (original_target)
7502	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7503	{
7504	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7505			      VOIDmode, 0);
7506
7507	  if (temp != original_target)
7508	    temp = copy_to_reg (temp);
7509
7510	  op1 = gen_label_rtx ();
7511	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7512				   GET_MODE (temp), unsignedp, 0, op1);
7513	  emit_move_insn (temp, const1_rtx);
7514	  emit_label (op1);
7515	  return temp;
7516	}
7517
7518      /* If no set-flag instruction, must generate a conditional
7519	 store into a temporary variable.  Drop through
7520	 and handle this like && and ||.  */
7521
7522    case TRUTH_ANDIF_EXPR:
7523    case TRUTH_ORIF_EXPR:
7524      if (! ignore
7525	  && (target == 0 || ! safe_from_p (target, exp, 1)
7526	      /* Make sure we don't have a hard reg (such as function's return
7527		 value) live across basic blocks, if not optimizing.  */
7528	      || (!optimize && GET_CODE (target) == REG
7529		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7530	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7531
7532      if (target)
7533	emit_clr_insn (target);
7534
7535      op1 = gen_label_rtx ();
7536      jumpifnot (exp, op1);
7537
7538      if (target)
7539	emit_0_to_1_insn (target);
7540
7541      emit_label (op1);
7542      return ignore ? const0_rtx : target;
7543
7544    case TRUTH_NOT_EXPR:
7545      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7546      /* The parser is careful to generate TRUTH_NOT_EXPR
7547	 only with operands that are always zero or one.  */
7548      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7549			   target, 1, OPTAB_LIB_WIDEN);
7550      if (temp == 0)
7551	abort ();
7552      return temp;
7553
7554    case COMPOUND_EXPR:
7555      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7556      emit_queue ();
7557      return expand_expr (TREE_OPERAND (exp, 1),
7558			  (ignore ? const0_rtx : target),
7559			  VOIDmode, 0);
7560
7561    case COND_EXPR:
7562      /* If we would have a "singleton" (see below) were it not for a
7563	 conversion in each arm, bring that conversion back out.  */
7564      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7565	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7566	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7567	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7568	{
7569	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7570	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7571
7572	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7573	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7574	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7575		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7576	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7577		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7578	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7579		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7580	    return expand_expr (build1 (NOP_EXPR, type,
7581					build (COND_EXPR, TREE_TYPE (true),
7582					       TREE_OPERAND (exp, 0),
7583					       true, false)),
7584				target, tmode, modifier);
7585	}
7586
7587      {
7588	/* Note that COND_EXPRs whose type is a structure or union
7589	   are required to be constructed to contain assignments of
7590	   a temporary variable, so that we can evaluate them here
7591	   for side effect only.  If type is void, we must do likewise.  */
7592
7593	/* If an arm of the branch requires a cleanup,
7594	   only that cleanup is performed.  */
7595
7596	tree singleton = 0;
7597	tree binary_op = 0, unary_op = 0;
7598
7599	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7600	   convert it to our mode, if necessary.  */
7601	if (integer_onep (TREE_OPERAND (exp, 1))
7602	    && integer_zerop (TREE_OPERAND (exp, 2))
7603	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7604	  {
7605	    if (ignore)
7606	      {
7607		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7608			     ro_modifier);
7609		return const0_rtx;
7610	      }
7611
7612	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7613	    if (GET_MODE (op0) == mode)
7614	      return op0;
7615
7616	    if (target == 0)
7617	      target = gen_reg_rtx (mode);
7618	    convert_move (target, op0, unsignedp);
7619	    return target;
7620	  }
7621
7622	/* Check for X ? A + B : A.  If we have this, we can copy A to the
7623	   output and conditionally add B.  Similarly for unary operations.
7624	   Don't do this if X has side-effects because those side effects
7625	   might affect A or B and the "?" operation is a sequence point in
7626	   ANSI.  (operand_equal_p tests for side effects.)  */
7627
7628	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7629	    && operand_equal_p (TREE_OPERAND (exp, 2),
7630				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7631	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7632	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7633		 && operand_equal_p (TREE_OPERAND (exp, 1),
7634				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7635	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7636	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7637		 && operand_equal_p (TREE_OPERAND (exp, 2),
7638				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7639	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7640	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7641		 && operand_equal_p (TREE_OPERAND (exp, 1),
7642				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7643	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7644
7645	/* If we are not to produce a result, we have no target.  Otherwise,
7646	   if a target was specified use it; it will not be used as an
7647	   intermediate target unless it is safe.  If no target, use a
7648	   temporary.  */
7649
7650	if (ignore)
7651	  temp = 0;
7652	else if (original_target
7653		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7654		     || (singleton && GET_CODE (original_target) == REG
7655			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7656			 && original_target == var_rtx (singleton)))
7657		 && GET_MODE (original_target) == mode
7658#ifdef HAVE_conditional_move
7659		 && (! can_conditionally_move_p (mode)
7660		     || GET_CODE (original_target) == REG
7661		     || TREE_ADDRESSABLE (type))
7662#endif
7663		 && ! (GET_CODE (original_target) == MEM
7664		       && MEM_VOLATILE_P (original_target)))
7665	  temp = original_target;
7666	else if (TREE_ADDRESSABLE (type))
7667	  abort ();
7668	else
7669	  temp = assign_temp (type, 0, 0, 1);
7670
7671	/* If we had X ? A + C : A, with C a constant power of 2, and we can
7672	   do the test of X as a store-flag operation, do this as
7673	   A + ((X != 0) << log C).  Similarly for other simple binary
7674	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
7675	if (temp && singleton && binary_op
7676	    && (TREE_CODE (binary_op) == PLUS_EXPR
7677		|| TREE_CODE (binary_op) == MINUS_EXPR
7678		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
7679		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
7680	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7681		: integer_onep (TREE_OPERAND (binary_op, 1)))
7682	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7683	  {
7684	    rtx result;
7685	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7686			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7687			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7688			    : xor_optab);
7689
7690	    /* If we had X ? A : A + 1, do this as A + (X == 0).
7691
7692	       We have to invert the truth value here and then put it
7693	       back later if do_store_flag fails.  We cannot simply copy
7694	       TREE_OPERAND (exp, 0) to another variable and modify that
7695	       because invert_truthvalue can modify the tree pointed to
7696	       by its argument.  */
7697	    if (singleton == TREE_OPERAND (exp, 1))
7698	      TREE_OPERAND (exp, 0)
7699		= invert_truthvalue (TREE_OPERAND (exp, 0));
7700
7701	    result = do_store_flag (TREE_OPERAND (exp, 0),
7702				    (safe_from_p (temp, singleton, 1)
7703				     ? temp : NULL_RTX),
7704				    mode, BRANCH_COST <= 1);
7705
7706	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7707	      result = expand_shift (LSHIFT_EXPR, mode, result,
7708				     build_int_2 (tree_log2
7709						  (TREE_OPERAND
7710						   (binary_op, 1)),
7711						  0),
7712				     (safe_from_p (temp, singleton, 1)
7713				      ? temp : NULL_RTX), 0);
7714
7715	    if (result)
7716	      {
7717		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7718		return expand_binop (mode, boptab, op1, result, temp,
7719				     unsignedp, OPTAB_LIB_WIDEN);
7720	      }
7721	    else if (singleton == TREE_OPERAND (exp, 1))
7722	      TREE_OPERAND (exp, 0)
7723		= invert_truthvalue (TREE_OPERAND (exp, 0));
7724	  }
7725
7726	do_pending_stack_adjust ();
7727	NO_DEFER_POP;
7728	op0 = gen_label_rtx ();
7729
7730	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7731	  {
7732	    if (temp != 0)
7733	      {
7734		/* If the target conflicts with the other operand of the
7735		   binary op, we can't use it.  Also, we can't use the target
7736		   if it is a hard register, because evaluating the condition
7737		   might clobber it.  */
7738		if ((binary_op
7739		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7740		    || (GET_CODE (temp) == REG
7741			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
7742		  temp = gen_reg_rtx (mode);
7743		store_expr (singleton, temp, 0);
7744	      }
7745	    else
7746	      expand_expr (singleton,
7747			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7748	    if (singleton == TREE_OPERAND (exp, 1))
7749	      jumpif (TREE_OPERAND (exp, 0), op0);
7750	    else
7751	      jumpifnot (TREE_OPERAND (exp, 0), op0);
7752
7753	    start_cleanup_deferral ();
7754	    if (binary_op && temp == 0)
7755	      /* Just touch the other operand.  */
7756	      expand_expr (TREE_OPERAND (binary_op, 1),
7757			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7758	    else if (binary_op)
7759	      store_expr (build (TREE_CODE (binary_op), type,
7760				 make_tree (type, temp),
7761				 TREE_OPERAND (binary_op, 1)),
7762			  temp, 0);
7763	    else
7764	      store_expr (build1 (TREE_CODE (unary_op), type,
7765				  make_tree (type, temp)),
7766			  temp, 0);
7767	    op1 = op0;
7768	  }
7769	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7770	   comparison operator.  If we have one of these cases, set the
7771	   output to A, branch on A (cse will merge these two references),
7772	   then set the output to FOO.  */
7773	else if (temp
7774		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7775		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7776		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7777				     TREE_OPERAND (exp, 1), 0)
7778		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7779		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7780		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7781	  {
7782	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7783	      temp = gen_reg_rtx (mode);
7784	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7785	    jumpif (TREE_OPERAND (exp, 0), op0);
7786
7787	    start_cleanup_deferral ();
7788	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7789	    op1 = op0;
7790	  }
7791	else if (temp
7792		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7793		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7794		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7795				     TREE_OPERAND (exp, 2), 0)
7796		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7797		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7798		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7799	  {
7800	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7801	      temp = gen_reg_rtx (mode);
7802	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
7803	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7804
7805	    start_cleanup_deferral ();
7806	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
7807	    op1 = op0;
7808	  }
7809	else
7810	  {
7811	    op1 = gen_label_rtx ();
7812	    jumpifnot (TREE_OPERAND (exp, 0), op0);
7813
7814	    start_cleanup_deferral ();
7815	    if (temp != 0)
7816	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
7817	    else
7818	      expand_expr (TREE_OPERAND (exp, 1),
7819			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7820	    end_cleanup_deferral ();
7821	    emit_queue ();
7822	    emit_jump_insn (gen_jump (op1));
7823	    emit_barrier ();
7824	    emit_label (op0);
7825	    start_cleanup_deferral ();
7826	    if (temp != 0)
7827	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
7828	    else
7829	      expand_expr (TREE_OPERAND (exp, 2),
7830			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7831	  }
7832
7833	end_cleanup_deferral ();
7834
7835	emit_queue ();
7836	emit_label (op1);
7837	OK_DEFER_POP;
7838
7839	return temp;
7840      }
7841
7842    case TARGET_EXPR:
7843      {
7844	/* Something needs to be initialized, but we didn't know
7845	   where that thing was when building the tree.  For example,
7846	   it could be the return value of a function, or a parameter
7847	   to a function which lays down in the stack, or a temporary
7848	   variable which must be passed by reference.
7849
7850	   We guarantee that the expression will either be constructed
7851	   or copied into our original target.  */
7852
7853	tree slot = TREE_OPERAND (exp, 0);
7854	tree cleanups = NULL_TREE;
7855	tree exp1;
7856
7857	if (TREE_CODE (slot) != VAR_DECL)
7858	  abort ();
7859
7860	if (! ignore)
7861	  target = original_target;
7862
7863	if (target == 0)
7864	  {
7865	    if (DECL_RTL (slot) != 0)
7866	      {
7867		target = DECL_RTL (slot);
7868		/* If we have already expanded the slot, so don't do
7869		   it again.  (mrs)  */
7870		if (TREE_OPERAND (exp, 1) == NULL_TREE)
7871		  return target;
7872	      }
7873	    else
7874	      {
7875		target = assign_temp (type, 2, 0, 1);
7876		/* All temp slots at this level must not conflict.  */
7877		preserve_temp_slots (target);
7878		DECL_RTL (slot) = target;
7879		if (TREE_ADDRESSABLE (slot))
7880		  {
7881		    TREE_ADDRESSABLE (slot) = 0;
7882		    mark_addressable (slot);
7883		  }
7884
7885		/* Since SLOT is not known to the called function
7886		   to belong to its stack frame, we must build an explicit
7887		   cleanup.  This case occurs when we must build up a reference
7888		   to pass the reference as an argument.  In this case,
7889		   it is very likely that such a reference need not be
7890		   built here.  */
7891
7892		if (TREE_OPERAND (exp, 2) == 0)
7893		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7894		cleanups = TREE_OPERAND (exp, 2);
7895	      }
7896	  }
7897	else
7898	  {
7899	    /* This case does occur, when expanding a parameter which
7900	       needs to be constructed on the stack.  The target
7901	       is the actual stack address that we want to initialize.
7902	       The function we call will perform the cleanup in this case.  */
7903
7904	    /* If we have already assigned it space, use that space,
7905	       not target that we were passed in, as our target
7906	       parameter is only a hint.  */
7907	    if (DECL_RTL (slot) != 0)
7908              {
7909                target = DECL_RTL (slot);
7910                /* If we have already expanded the slot, so don't do
7911                   it again.  (mrs)  */
7912                if (TREE_OPERAND (exp, 1) == NULL_TREE)
7913                  return target;
7914	      }
7915	    else
7916	      {
7917		DECL_RTL (slot) = target;
7918		/* If we must have an addressable slot, then make sure that
7919		   the RTL that we just stored in slot is OK.  */
7920		if (TREE_ADDRESSABLE (slot))
7921		  {
7922		    TREE_ADDRESSABLE (slot) = 0;
7923		    mark_addressable (slot);
7924		  }
7925	      }
7926	  }
7927
7928	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7929	/* Mark it as expanded.  */
7930	TREE_OPERAND (exp, 1) = NULL_TREE;
7931
7932	TREE_USED (slot) = 1;
7933	store_expr (exp1, target, 0);
7934
7935	expand_decl_cleanup (NULL_TREE, cleanups);
7936
7937	return target;
7938      }
7939
7940    case INIT_EXPR:
7941      {
7942	tree lhs = TREE_OPERAND (exp, 0);
7943	tree rhs = TREE_OPERAND (exp, 1);
7944	tree noncopied_parts = 0;
7945	tree lhs_type = TREE_TYPE (lhs);
7946
7947	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7948	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7949	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7950						  TYPE_NONCOPIED_PARTS (lhs_type));
7951	while (noncopied_parts != 0)
7952	  {
7953	    expand_assignment (TREE_VALUE (noncopied_parts),
7954			       TREE_PURPOSE (noncopied_parts), 0, 0);
7955	    noncopied_parts = TREE_CHAIN (noncopied_parts);
7956	  }
7957	return temp;
7958      }
7959
7960    case MODIFY_EXPR:
7961      {
7962	/* If lhs is complex, expand calls in rhs before computing it.
7963	   That's so we don't compute a pointer and save it over a call.
7964	   If lhs is simple, compute it first so we can give it as a
7965	   target if the rhs is just a call.  This avoids an extra temp and copy
7966	   and that prevents a partial-subsumption which makes bad code.
7967	   Actually we could treat component_ref's of vars like vars.  */
7968
7969	tree lhs = TREE_OPERAND (exp, 0);
7970	tree rhs = TREE_OPERAND (exp, 1);
7971	tree noncopied_parts = 0;
7972	tree lhs_type = TREE_TYPE (lhs);
7973
7974	temp = 0;
7975
7976	if (TREE_CODE (lhs) != VAR_DECL
7977	    && TREE_CODE (lhs) != RESULT_DECL
7978	    && TREE_CODE (lhs) != PARM_DECL
7979	    && ! (TREE_CODE (lhs) == INDIRECT_REF
7980		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7981	  preexpand_calls (exp);
7982
7983	/* Check for |= or &= of a bitfield of size one into another bitfield
7984	   of size 1.  In this case, (unless we need the result of the
7985	   assignment) we can do this more efficiently with a
7986	   test followed by an assignment, if necessary.
7987
7988	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
7989	   things change so we do, this code should be enhanced to
7990	   support it.  */
7991	if (ignore
7992	    && TREE_CODE (lhs) == COMPONENT_REF
7993	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
7994		|| TREE_CODE (rhs) == BIT_AND_EXPR)
7995	    && TREE_OPERAND (rhs, 0) == lhs
7996	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7997	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7998	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7999	  {
8000	    rtx label = gen_label_rtx ();
8001
8002	    do_jump (TREE_OPERAND (rhs, 1),
8003		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8004		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8005	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
8006					     (TREE_CODE (rhs) == BIT_IOR_EXPR
8007					      ? integer_one_node
8008					      : integer_zero_node)),
8009			       0, 0);
8010	    do_pending_stack_adjust ();
8011	    emit_label (label);
8012	    return const0_rtx;
8013	  }
8014
8015	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8016	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8017	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8018						  TYPE_NONCOPIED_PARTS (lhs_type));
8019
8020	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8021	while (noncopied_parts != 0)
8022	  {
8023	    expand_assignment (TREE_PURPOSE (noncopied_parts),
8024			       TREE_VALUE (noncopied_parts), 0, 0);
8025	    noncopied_parts = TREE_CHAIN (noncopied_parts);
8026	  }
8027	return temp;
8028      }
8029
8030    case RETURN_EXPR:
8031      if (!TREE_OPERAND (exp, 0))
8032	expand_null_return ();
8033      else
8034	expand_return (TREE_OPERAND (exp, 0));
8035      return const0_rtx;
8036
8037    case PREINCREMENT_EXPR:
8038    case PREDECREMENT_EXPR:
8039      return expand_increment (exp, 0, ignore);
8040
8041    case POSTINCREMENT_EXPR:
8042    case POSTDECREMENT_EXPR:
8043      /* Faster to treat as pre-increment if result is not used.  */
8044      return expand_increment (exp, ! ignore, ignore);
8045
8046    case ADDR_EXPR:
8047      /* If nonzero, TEMP will be set to the address of something that might
8048	 be a MEM corresponding to a stack slot.  */
8049      temp = 0;
8050
8051      /* Are we taking the address of a nested function?  */
8052      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8053	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8054	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8055	  && ! TREE_STATIC (exp))
8056	{
8057	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
8058	  op0 = force_operand (op0, target);
8059	}
8060      /* If we are taking the address of something erroneous, just
8061	 return a zero.  */
8062      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8063	return const0_rtx;
8064      else
8065	{
8066	  /* We make sure to pass const0_rtx down if we came in with
8067	     ignore set, to avoid doing the cleanups twice for something.  */
8068	  op0 = expand_expr (TREE_OPERAND (exp, 0),
8069			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
8070			     (modifier == EXPAND_INITIALIZER
8071			      ? modifier : EXPAND_CONST_ADDRESS));
8072
8073	  /* If we are going to ignore the result, OP0 will have been set
8074	     to const0_rtx, so just return it.  Don't get confused and
8075	     think we are taking the address of the constant.  */
8076	  if (ignore)
8077	    return op0;
8078
8079	  op0 = protect_from_queue (op0, 0);
8080
8081	  /* We would like the object in memory.  If it is a constant,
8082	     we can have it be statically allocated into memory.  For
8083	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
8084	     memory and store the value into it.  */
8085
8086	  if (CONSTANT_P (op0))
8087	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8088				   op0);
8089	  else if (GET_CODE (op0) == MEM)
8090	    {
8091	      mark_temp_addr_taken (op0);
8092	      temp = XEXP (op0, 0);
8093	    }
8094
8095	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8096		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8097	    {
8098	      /* If this object is in a register, it must be not
8099		 be BLKmode.  */
8100	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8101	      rtx memloc = assign_temp (inner_type, 1, 1, 1);
8102
8103	      mark_temp_addr_taken (memloc);
8104	      emit_move_insn (memloc, op0);
8105	      op0 = memloc;
8106	    }
8107
8108	  if (GET_CODE (op0) != MEM)
8109	    abort ();
8110
8111	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8112	    {
8113	      temp = XEXP (op0, 0);
8114#ifdef POINTERS_EXTEND_UNSIGNED
8115	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8116		  && mode == ptr_mode)
8117		temp = convert_memory_address (ptr_mode, temp);
8118#endif
8119	      return temp;
8120	    }
8121
8122	  op0 = force_operand (XEXP (op0, 0), target);
8123	}
8124
8125      if (flag_force_addr && GET_CODE (op0) != REG)
8126	op0 = force_reg (Pmode, op0);
8127
8128      if (GET_CODE (op0) == REG
8129	  && ! REG_USERVAR_P (op0))
8130	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8131
8132      /* If we might have had a temp slot, add an equivalent address
8133	 for it.  */
8134      if (temp != 0)
8135	update_temp_slot_address (temp, op0);
8136
8137#ifdef POINTERS_EXTEND_UNSIGNED
8138      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8139	  && mode == ptr_mode)
8140	op0 = convert_memory_address (ptr_mode, op0);
8141#endif
8142
8143      return op0;
8144
8145    case ENTRY_VALUE_EXPR:
8146      abort ();
8147
8148    /* COMPLEX type for Extended Pascal & Fortran  */
8149    case COMPLEX_EXPR:
8150      {
8151	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8152	rtx insns;
8153
8154	/* Get the rtx code of the operands.  */
8155	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8156	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8157
8158	if (! target)
8159	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8160
8161	start_sequence ();
8162
8163	/* Move the real (op0) and imaginary (op1) parts to their location.  */
8164	emit_move_insn (gen_realpart (mode, target), op0);
8165	emit_move_insn (gen_imagpart (mode, target), op1);
8166
8167	insns = get_insns ();
8168	end_sequence ();
8169
8170	/* Complex construction should appear as a single unit.  */
8171	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8172	   each with a separate pseudo as destination.
8173	   It's not correct for flow to treat them as a unit.  */
8174	if (GET_CODE (target) != CONCAT)
8175	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8176	else
8177	  emit_insns (insns);
8178
8179	return target;
8180      }
8181
8182    case REALPART_EXPR:
8183      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8184      return gen_realpart (mode, op0);
8185
8186    case IMAGPART_EXPR:
8187      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8188      return gen_imagpart (mode, op0);
8189
8190    case CONJ_EXPR:
8191      {
8192	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8193	rtx imag_t;
8194	rtx insns;
8195
8196	op0  = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8197
8198	if (! target)
8199	  target = gen_reg_rtx (mode);
8200
8201	start_sequence ();
8202
8203	/* Store the realpart and the negated imagpart to target.  */
8204	emit_move_insn (gen_realpart (partmode, target),
8205			gen_realpart (partmode, op0));
8206
8207	imag_t = gen_imagpart (partmode, target);
8208	temp = expand_unop (partmode, neg_optab,
8209			       gen_imagpart (partmode, op0), imag_t, 0);
8210	if (temp != imag_t)
8211	  emit_move_insn (imag_t, temp);
8212
8213	insns = get_insns ();
8214	end_sequence ();
8215
8216	/* Conjugate should appear as a single unit
8217	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8218	   each with a separate pseudo as destination.
8219	   It's not correct for flow to treat them as a unit.  */
8220	if (GET_CODE (target) != CONCAT)
8221	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8222	else
8223	  emit_insns (insns);
8224
8225	return target;
8226      }
8227
8228    case TRY_CATCH_EXPR:
8229      {
8230	tree handler = TREE_OPERAND (exp, 1);
8231
8232	expand_eh_region_start ();
8233
8234	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8235
8236	expand_eh_region_end (handler);
8237
8238	return op0;
8239      }
8240
8241    case TRY_FINALLY_EXPR:
8242      {
8243	tree try_block = TREE_OPERAND (exp, 0);
8244	tree finally_block = TREE_OPERAND (exp, 1);
8245	rtx finally_label = gen_label_rtx ();
8246	rtx done_label = gen_label_rtx ();
8247	rtx return_link = gen_reg_rtx (Pmode);
8248	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8249			      (tree) finally_label, (tree) return_link);
8250	TREE_SIDE_EFFECTS (cleanup) = 1;
8251
8252	/* Start a new binding layer that will keep track of all cleanup
8253	   actions to be performed.  */
8254	expand_start_bindings (0);
8255
8256	target_temp_slot_level = temp_slot_level;
8257
8258	expand_decl_cleanup (NULL_TREE, cleanup);
8259	op0 = expand_expr (try_block, target, tmode, modifier);
8260
8261	preserve_temp_slots (op0);
8262	expand_end_bindings (NULL_TREE, 0, 0);
8263	emit_jump (done_label);
8264	emit_label (finally_label);
8265	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8266	emit_indirect_jump (return_link);
8267	emit_label (done_label);
8268	return op0;
8269      }
8270
8271      case GOTO_SUBROUTINE_EXPR:
8272      {
8273	rtx subr = (rtx) TREE_OPERAND (exp, 0);
8274	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8275	rtx return_address = gen_label_rtx ();
8276	emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8277	emit_jump (subr);
8278	emit_label (return_address);
8279	return const0_rtx;
8280      }
8281
8282    case POPDCC_EXPR:
8283      {
8284	rtx dcc = get_dynamic_cleanup_chain ();
8285	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8286	return const0_rtx;
8287      }
8288
8289    case POPDHC_EXPR:
8290      {
8291	rtx dhc = get_dynamic_handler_chain ();
8292	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8293	return const0_rtx;
8294      }
8295
8296    default:
8297      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8298    }
8299
8300  /* Here to do an ordinary binary operator, generating an instruction
8301     from the optab already placed in `this_optab'.  */
8302 binop:
8303  preexpand_calls (exp);
8304  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8305    subtarget = 0;
8306  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8307  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8308 binop2:
8309  temp = expand_binop (mode, this_optab, op0, op1, target,
8310		       unsignedp, OPTAB_LIB_WIDEN);
8311  if (temp == 0)
8312    abort ();
8313  return temp;
8314}
8315
8316
8317
8318/* Return the alignment in bits of EXP, a pointer valued expression.
8319   But don't return more than MAX_ALIGN no matter what.
8320   The alignment returned is, by default, the alignment of the thing that
8321   EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8322
8323   Otherwise, look at the expression to see if we can do better, i.e., if the
8324   expression is actually pointing at an object whose alignment is tighter.  */
8325
8326static int
8327get_pointer_alignment (exp, max_align)
8328     tree exp;
8329     unsigned max_align;
8330{
8331  unsigned align, inner;
8332
8333  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8334    return 0;
8335
8336  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8337  align = MIN (align, max_align);
8338
8339  while (1)
8340    {
8341      switch (TREE_CODE (exp))
8342	{
8343	case NOP_EXPR:
8344	case CONVERT_EXPR:
8345	case NON_LVALUE_EXPR:
8346	  exp = TREE_OPERAND (exp, 0);
8347	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8348	    return align;
8349	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8350	  align = MIN (inner, max_align);
8351	  break;
8352
8353	case PLUS_EXPR:
8354	  /* If sum of pointer + int, restrict our maximum alignment to that
8355	     imposed by the integer.  If not, we can't do any better than
8356	     ALIGN.  */
8357	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8358	    return align;
8359
8360	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8361		  & (max_align - 1))
8362		 != 0)
8363	    max_align >>= 1;
8364
8365	  exp = TREE_OPERAND (exp, 0);
8366	  break;
8367
8368	case ADDR_EXPR:
8369	  /* See what we are pointing at and look at its alignment.  */
8370	  exp = TREE_OPERAND (exp, 0);
8371	  if (TREE_CODE (exp) == FUNCTION_DECL)
8372	    align = FUNCTION_BOUNDARY;
8373	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8374	    align = DECL_ALIGN (exp);
8375#ifdef CONSTANT_ALIGNMENT
8376	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8377	    align = CONSTANT_ALIGNMENT (exp, align);
8378#endif
8379	  return MIN (align, max_align);
8380
8381	default:
8382	  return align;
8383	}
8384    }
8385}
8386
8387/* Return the tree node and offset if a given argument corresponds to
8388   a string constant.  */
8389
8390static tree
8391string_constant (arg, ptr_offset)
8392     tree arg;
8393     tree *ptr_offset;
8394{
8395  STRIP_NOPS (arg);
8396
8397  if (TREE_CODE (arg) == ADDR_EXPR
8398      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8399    {
8400      *ptr_offset = integer_zero_node;
8401      return TREE_OPERAND (arg, 0);
8402    }
8403  else if (TREE_CODE (arg) == PLUS_EXPR)
8404    {
8405      tree arg0 = TREE_OPERAND (arg, 0);
8406      tree arg1 = TREE_OPERAND (arg, 1);
8407
8408      STRIP_NOPS (arg0);
8409      STRIP_NOPS (arg1);
8410
8411      if (TREE_CODE (arg0) == ADDR_EXPR
8412	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8413	{
8414	  *ptr_offset = arg1;
8415	  return TREE_OPERAND (arg0, 0);
8416	}
8417      else if (TREE_CODE (arg1) == ADDR_EXPR
8418	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8419	{
8420	  *ptr_offset = arg0;
8421	  return TREE_OPERAND (arg1, 0);
8422	}
8423    }
8424
8425  return 0;
8426}
8427
8428/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
8429   way, because it could contain a zero byte in the middle.
8430   TREE_STRING_LENGTH is the size of the character array, not the string.
8431
8432   Unfortunately, string_constant can't access the values of const char
8433   arrays with initializers, so neither can we do so here.  */
8434
8435static tree
8436c_strlen (src)
8437     tree src;
8438{
8439  tree offset_node;
8440  int offset, max;
8441  char *ptr;
8442
8443  src = string_constant (src, &offset_node);
8444  if (src == 0)
8445    return 0;
8446  max = TREE_STRING_LENGTH (src);
8447  ptr = TREE_STRING_POINTER (src);
8448  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8449    {
8450      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8451	 compute the offset to the following null if we don't know where to
8452	 start searching for it.  */
8453      int i;
8454      for (i = 0; i < max; i++)
8455	if (ptr[i] == 0)
8456	  return 0;
8457      /* We don't know the starting offset, but we do know that the string
8458	 has no internal zero bytes.  We can assume that the offset falls
8459	 within the bounds of the string; otherwise, the programmer deserves
8460	 what he gets.  Subtract the offset from the length of the string,
8461	 and return that.  */
8462      /* This would perhaps not be valid if we were dealing with named
8463         arrays in addition to literal string constants.  */
8464      return size_binop (MINUS_EXPR, size_int (max), offset_node);
8465    }
8466
8467  /* We have a known offset into the string.  Start searching there for
8468     a null character.  */
8469  if (offset_node == 0)
8470    offset = 0;
8471  else
8472    {
8473      /* Did we get a long long offset?  If so, punt.  */
8474      if (TREE_INT_CST_HIGH (offset_node) != 0)
8475	return 0;
8476      offset = TREE_INT_CST_LOW (offset_node);
8477    }
8478  /* If the offset is known to be out of bounds, warn, and call strlen at
8479     runtime.  */
8480  if (offset < 0 || offset > max)
8481    {
8482      warning ("offset outside bounds of constant string");
8483      return 0;
8484    }
8485  /* Use strlen to search for the first zero byte.  Since any strings
8486     constructed with build_string will have nulls appended, we win even
8487     if we get handed something like (char[4])"abcd".
8488
8489     Since OFFSET is our starting index into the string, no further
8490     calculation is needed.  */
8491  return size_int (strlen (ptr + offset));
8492}
8493
8494rtx
8495expand_builtin_return_addr (fndecl_code, count, tem)
8496     enum built_in_function fndecl_code;
8497     int count;
8498     rtx tem;
8499{
8500  int i;
8501
8502  /* Some machines need special handling before we can access
8503     arbitrary frames.  For example, on the sparc, we must first flush
8504     all register windows to the stack.  */
8505#ifdef SETUP_FRAME_ADDRESSES
8506  if (count > 0)
8507    SETUP_FRAME_ADDRESSES ();
8508#endif
8509
8510  /* On the sparc, the return address is not in the frame, it is in a
8511     register.  There is no way to access it off of the current frame
8512     pointer, but it can be accessed off the previous frame pointer by
8513     reading the value from the register window save area.  */
8514#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8515  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8516    count--;
8517#endif
8518
8519  /* Scan back COUNT frames to the specified frame.  */
8520  for (i = 0; i < count; i++)
8521    {
8522      /* Assume the dynamic chain pointer is in the word that the
8523	 frame address points to, unless otherwise specified.  */
8524#ifdef DYNAMIC_CHAIN_ADDRESS
8525      tem = DYNAMIC_CHAIN_ADDRESS (tem);
8526#endif
8527      tem = memory_address (Pmode, tem);
8528      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8529    }
8530
8531  /* For __builtin_frame_address, return what we've got.  */
8532  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8533    return tem;
8534
8535  /* For __builtin_return_address, Get the return address from that
8536     frame.  */
8537#ifdef RETURN_ADDR_RTX
8538  tem = RETURN_ADDR_RTX (count, tem);
8539#else
8540  tem = memory_address (Pmode,
8541			plus_constant (tem, GET_MODE_SIZE (Pmode)));
8542  tem = gen_rtx_MEM (Pmode, tem);
8543#endif
8544  return tem;
8545}
8546
8547/* Construct the leading half of a __builtin_setjmp call.  Control will
8548   return to RECEIVER_LABEL.  This is used directly by sjlj exception
8549   handling code.  */
8550
8551void
8552expand_builtin_setjmp_setup (buf_addr, receiver_label)
8553     rtx buf_addr;
8554     rtx receiver_label;
8555{
8556  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8557  rtx stack_save;
8558
8559#ifdef POINTERS_EXTEND_UNSIGNED
8560  buf_addr = convert_memory_address (Pmode, buf_addr);
8561#endif
8562
8563  buf_addr = force_reg (Pmode, buf_addr);
8564
8565  emit_queue ();
8566
8567  /* We store the frame pointer and the address of receiver_label in
8568     the buffer and use the rest of it for the stack save area, which
8569     is machine-dependent.  */
8570
8571#ifndef BUILTIN_SETJMP_FRAME_VALUE
8572#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8573#endif
8574
8575  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8576		  BUILTIN_SETJMP_FRAME_VALUE);
8577  emit_move_insn (validize_mem
8578		  (gen_rtx_MEM (Pmode,
8579				plus_constant (buf_addr,
8580					       GET_MODE_SIZE (Pmode)))),
8581		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
8582
8583  stack_save = gen_rtx_MEM (sa_mode,
8584			    plus_constant (buf_addr,
8585					   2 * GET_MODE_SIZE (Pmode)));
8586  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8587
8588  /* If there is further processing to do, do it.  */
8589#ifdef HAVE_builtin_setjmp_setup
8590  if (HAVE_builtin_setjmp_setup)
8591    emit_insn (gen_builtin_setjmp_setup (buf_addr));
8592#endif
8593
8594  /* Tell optimize_save_area_alloca that extra work is going to
8595     need to go on during alloca.  */
8596  current_function_calls_setjmp = 1;
8597
8598  /* Set this so all the registers get saved in our frame; we need to be
8599     able to copy the saved values for any registers from frames we unwind. */
8600  current_function_has_nonlocal_label = 1;
8601}
8602
8603/* Construct the trailing part of a __builtin_setjmp call.
8604   This is used directly by sjlj exception handling code.  */
8605
8606void
8607expand_builtin_setjmp_receiver (receiver_label)
8608      rtx receiver_label ATTRIBUTE_UNUSED;
8609{
8610  /* Clobber the FP when we get here, so we have to make sure it's
8611     marked as used by this function.  */
8612  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8613
8614  /* Mark the static chain as clobbered here so life information
8615     doesn't get messed up for it.  */
8616  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8617
8618  /* Now put in the code to restore the frame pointer, and argument
8619     pointer, if needed.  The code below is from expand_end_bindings
8620     in stmt.c; see detailed documentation there.  */
8621#ifdef HAVE_nonlocal_goto
8622  if (! HAVE_nonlocal_goto)
8623#endif
8624    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8625
8626#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8627  if (fixed_regs[ARG_POINTER_REGNUM])
8628    {
8629#ifdef ELIMINABLE_REGS
8630      size_t i;
8631      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8632
8633      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8634	if (elim_regs[i].from == ARG_POINTER_REGNUM
8635	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8636	  break;
8637
8638      if (i == sizeof elim_regs / sizeof elim_regs [0])
8639#endif
8640	{
8641	  /* Now restore our arg pointer from the address at which it
8642	     was saved in our stack frame.
8643	     If there hasn't be space allocated for it yet, make
8644	     some now.  */
8645	  if (arg_pointer_save_area == 0)
8646	    arg_pointer_save_area
8647	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8648	  emit_move_insn (virtual_incoming_args_rtx,
8649			  copy_to_reg (arg_pointer_save_area));
8650	}
8651    }
8652#endif
8653
8654#ifdef HAVE_builtin_setjmp_receiver
8655  if (HAVE_builtin_setjmp_receiver)
8656    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
8657  else
8658#endif
8659#ifdef HAVE_nonlocal_goto_receiver
8660    if (HAVE_nonlocal_goto_receiver)
8661      emit_insn (gen_nonlocal_goto_receiver ());
8662    else
8663#endif
8664      {
8665	; /* Nothing */
8666      }
8667}
8668
8669
8670/* __builtin_setjmp is passed a pointer to an array of five words (not
8671   all will be used on all machines).  It operates similarly to the C
8672   library function of the same name, but is more efficient.  Much of
8673   the code below (and for longjmp) is copied from the handling of
8674   non-local gotos.
8675
8676   NOTE: This is intended for use by GNAT and the exception handling
8677   scheme in the compiler and will only work in the method used by
8678   them.  */
8679
8680static rtx
8681expand_builtin_setjmp (arglist, target)
8682     tree arglist;
8683     rtx target;
8684{
8685  rtx buf_addr, next_lab, cont_lab;
8686
8687  if (arglist == 0
8688      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8689    return NULL_RTX;
8690
8691  if (target == 0 || GET_CODE (target) != REG
8692      || REGNO (target) < FIRST_PSEUDO_REGISTER)
8693    target = gen_reg_rtx (TYPE_MODE (integer_type_node));
8694
8695  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8696
8697  next_lab = gen_label_rtx ();
8698  cont_lab = gen_label_rtx ();
8699
8700  expand_builtin_setjmp_setup (buf_addr, next_lab);
8701
8702  /* Set TARGET to zero and branch to the continue label.  */
8703  emit_move_insn (target, const0_rtx);
8704  emit_jump_insn (gen_jump (cont_lab));
8705  emit_barrier ();
8706  emit_label (next_lab);
8707
8708  expand_builtin_setjmp_receiver (next_lab);
8709
8710  /* Set TARGET to one.  */
8711  emit_move_insn (target, const1_rtx);
8712  emit_label (cont_lab);
8713
8714  /* Tell flow about the strange goings on.  Putting `next_lab' on
8715     `nonlocal_goto_handler_labels' to indicates that function
8716     calls may traverse the arc back to this label.  */
8717
8718  current_function_has_nonlocal_label = 1;
8719  nonlocal_goto_handler_labels
8720    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
8721
8722  return target;
8723}
8724
8725void
8726expand_builtin_longjmp (buf_addr, value)
8727     rtx buf_addr, value;
8728{
8729  rtx fp, lab, stack;
8730  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8731
8732#ifdef POINTERS_EXTEND_UNSIGNED
8733  buf_addr = convert_memory_address (Pmode, buf_addr);
8734#endif
8735  buf_addr = force_reg (Pmode, buf_addr);
8736
8737  /* We used to store value in static_chain_rtx, but that fails if pointers
8738     are smaller than integers.  We instead require that the user must pass
8739     a second argument of 1, because that is what builtin_setjmp will
8740     return.  This also makes EH slightly more efficient, since we are no
8741     longer copying around a value that we don't care about.  */
8742  if (value != const1_rtx)
8743    abort ();
8744
8745#ifdef HAVE_builtin_longjmp
8746  if (HAVE_builtin_longjmp)
8747    emit_insn (gen_builtin_longjmp (buf_addr));
8748  else
8749#endif
8750    {
8751      fp = gen_rtx_MEM (Pmode, buf_addr);
8752      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8753					       GET_MODE_SIZE (Pmode)));
8754
8755      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8756						   2 * GET_MODE_SIZE (Pmode)));
8757
8758      /* Pick up FP, label, and SP from the block and jump.  This code is
8759	 from expand_goto in stmt.c; see there for detailed comments.  */
8760#if HAVE_nonlocal_goto
8761      if (HAVE_nonlocal_goto)
8762	/* We have to pass a value to the nonlocal_goto pattern that will
8763	   get copied into the static_chain pointer, but it does not matter
8764	   what that value is, because builtin_setjmp does not use it.  */
8765	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8766      else
8767#endif
8768	{
8769	  lab = copy_to_reg (lab);
8770
8771	  emit_move_insn (hard_frame_pointer_rtx, fp);
8772	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8773
8774	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8775	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8776	  emit_indirect_jump (lab);
8777	}
8778    }
8779}
8780
8781static rtx
8782get_memory_rtx (exp)
8783     tree exp;
8784{
8785  rtx mem;
8786  int is_aggregate;
8787
8788  mem = gen_rtx_MEM (BLKmode,
8789		     memory_address (BLKmode,
8790				     expand_expr (exp, NULL_RTX,
8791						  ptr_mode, EXPAND_SUM)));
8792
8793  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8794
8795  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
8796     if the value is the address of a structure or if the expression is
8797     cast to a pointer to structure type.  */
8798  is_aggregate = 0;
8799
8800  while (TREE_CODE (exp) == NOP_EXPR)
8801    {
8802      tree cast_type = TREE_TYPE (exp);
8803      if (TREE_CODE (cast_type) == POINTER_TYPE
8804	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8805	{
8806	  is_aggregate = 1;
8807	  break;
8808	}
8809      exp = TREE_OPERAND (exp, 0);
8810    }
8811
8812  if (is_aggregate == 0)
8813    {
8814      tree type;
8815
8816      if (TREE_CODE (exp) == ADDR_EXPR)
8817	/* If this is the address of an object, check whether the
8818	   object is an array.  */
8819	type = TREE_TYPE (TREE_OPERAND (exp, 0));
8820      else
8821	type = TREE_TYPE (TREE_TYPE (exp));
8822      is_aggregate = AGGREGATE_TYPE_P (type);
8823    }
8824
8825  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8826  return mem;
8827}
8828
8829
8830/* Expand an expression EXP that calls a built-in function,
8831   with result going to TARGET if that's convenient
8832   (and in mode MODE if that's convenient).
8833   SUBTARGET may be used as the target for computing one of EXP's operands.
8834   IGNORE is nonzero if the value is to be ignored.  */
8835
8836#define CALLED_AS_BUILT_IN(NODE) \
8837   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8838
8839static rtx
8840expand_builtin (exp, target, subtarget, mode, ignore)
8841     tree exp;
8842     rtx target;
8843     rtx subtarget;
8844     enum machine_mode mode;
8845     int ignore;
8846{
8847  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8848  tree arglist = TREE_OPERAND (exp, 1);
8849  rtx op0;
8850  rtx lab1, insns;
8851  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8852  optab builtin_optab;
8853
8854  switch (DECL_FUNCTION_CODE (fndecl))
8855    {
8856    case BUILT_IN_ABS:
8857    case BUILT_IN_LABS:
8858    case BUILT_IN_FABS:
8859      /* build_function_call changes these into ABS_EXPR.  */
8860      abort ();
8861
8862    case BUILT_IN_SIN:
8863    case BUILT_IN_COS:
8864      /* Treat these like sqrt, but only if the user asks for them.  */
8865      if (! flag_fast_math)
8866	break;
8867    case BUILT_IN_FSQRT:
8868      /* If not optimizing, call the library function.  */
8869      if (! optimize)
8870	break;
8871
8872      if (arglist == 0
8873	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
8874	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8875	break;
8876
8877      /* Stabilize and compute the argument.  */
8878      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8879	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8880	{
8881	  exp = copy_node (exp);
8882	  arglist = copy_node (arglist);
8883	  TREE_OPERAND (exp, 1) = arglist;
8884	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8885	}
8886      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8887
8888      /* Make a suitable register to place result in.  */
8889      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8890
8891      emit_queue ();
8892      start_sequence ();
8893
8894      switch (DECL_FUNCTION_CODE (fndecl))
8895	{
8896	case BUILT_IN_SIN:
8897	  builtin_optab = sin_optab; break;
8898	case BUILT_IN_COS:
8899	  builtin_optab = cos_optab; break;
8900	case BUILT_IN_FSQRT:
8901	  builtin_optab = sqrt_optab; break;
8902	default:
8903	  abort ();
8904	}
8905
8906      /* Compute into TARGET.
8907	 Set TARGET to wherever the result comes back.  */
8908      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8909			    builtin_optab, op0, target, 0);
8910
8911      /* If we were unable to expand via the builtin, stop the
8912	 sequence (without outputting the insns) and break, causing
8913	 a call to the library function.  */
8914      if (target == 0)
8915	{
8916	  end_sequence ();
8917	  break;
8918        }
8919
8920      /* Check the results by default.  But if flag_fast_math is turned on,
8921	 then assume sqrt will always be called with valid arguments.  */
8922
8923      if (flag_errno_math && ! flag_fast_math)
8924	{
8925	  /* Don't define the builtin FP instructions
8926	     if your machine is not IEEE.  */
8927	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8928	    abort ();
8929
8930	  lab1 = gen_label_rtx ();
8931
8932	  /* Test the result; if it is NaN, set errno=EDOM because
8933	     the argument was not in the domain.  */
8934	  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8935				   0, 0, lab1);
8936
8937#ifdef TARGET_EDOM
8938	  {
8939#ifdef GEN_ERRNO_RTX
8940	    rtx errno_rtx = GEN_ERRNO_RTX;
8941#else
8942	    rtx errno_rtx
8943	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8944#endif
8945
8946	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8947	  }
8948#else
8949	  /* We can't set errno=EDOM directly; let the library call do it.
8950	     Pop the arguments right away in case the call gets deleted.  */
8951	  NO_DEFER_POP;
8952	  expand_call (exp, target, 0);
8953	  OK_DEFER_POP;
8954#endif
8955
8956	  emit_label (lab1);
8957	}
8958
8959      /* Output the entire sequence.  */
8960      insns = get_insns ();
8961      end_sequence ();
8962      emit_insns (insns);
8963
8964      return target;
8965
8966    case BUILT_IN_FMOD:
8967      break;
8968
8969      /* __builtin_apply_args returns block of memory allocated on
8970	 the stack into which is stored the arg pointer, structure
8971	 value address, static chain, and all the registers that might
8972	 possibly be used in performing a function call.  The code is
8973	 moved to the start of the function so the incoming values are
8974	 saved.  */
8975    case BUILT_IN_APPLY_ARGS:
8976      /* Don't do __builtin_apply_args more than once in a function.
8977	 Save the result of the first call and reuse it.  */
8978      if (apply_args_value != 0)
8979	return apply_args_value;
8980      {
8981	/* When this function is called, it means that registers must be
8982	   saved on entry to this function.  So we migrate the
8983	   call to the first insn of this function.  */
8984	rtx temp;
8985	rtx seq;
8986
8987	start_sequence ();
8988	temp = expand_builtin_apply_args ();
8989	seq = get_insns ();
8990	end_sequence ();
8991
8992	apply_args_value = temp;
8993
8994	/* Put the sequence after the NOTE that starts the function.
8995	   If this is inside a SEQUENCE, make the outer-level insn
8996	   chain current, so the code is placed at the start of the
8997	   function.  */
8998	push_topmost_sequence ();
8999	emit_insns_before (seq, NEXT_INSN (get_insns ()));
9000	pop_topmost_sequence ();
9001	return temp;
9002      }
9003
9004      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9005	 FUNCTION with a copy of the parameters described by
9006	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
9007	 allocated on the stack into which is stored all the registers
9008	 that might possibly be used for returning the result of a
9009	 function.  ARGUMENTS is the value returned by
9010	 __builtin_apply_args.  ARGSIZE is the number of bytes of
9011	 arguments that must be copied.  ??? How should this value be
9012	 computed?  We'll also need a safe worst case value for varargs
9013	 functions.  */
9014    case BUILT_IN_APPLY:
9015      if (arglist == 0
9016	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9017	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
9018	  || TREE_CHAIN (arglist) == 0
9019	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9020	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9021	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9022	return const0_rtx;
9023      else
9024	{
9025	  int i;
9026	  tree t;
9027	  rtx ops[3];
9028
9029	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
9030	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
9031
9032	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
9033	}
9034
9035      /* __builtin_return (RESULT) causes the function to return the
9036	 value described by RESULT.  RESULT is address of the block of
9037	 memory returned by __builtin_apply.  */
9038    case BUILT_IN_RETURN:
9039      if (arglist
9040	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9041	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
9042	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
9043					    NULL_RTX, VOIDmode, 0));
9044      return const0_rtx;
9045
9046    case BUILT_IN_SAVEREGS:
9047      /* Don't do __builtin_saveregs more than once in a function.
9048	 Save the result of the first call and reuse it.  */
9049      if (saveregs_value != 0)
9050	return saveregs_value;
9051      {
9052	/* When this function is called, it means that registers must be
9053	   saved on entry to this function.  So we migrate the
9054	   call to the first insn of this function.  */
9055	rtx temp;
9056	rtx seq;
9057
9058	/* Now really call the function.  `expand_call' does not call
9059	   expand_builtin, so there is no danger of infinite recursion here.  */
9060	start_sequence ();
9061
9062#ifdef EXPAND_BUILTIN_SAVEREGS
9063	/* Do whatever the machine needs done in this case.  */
9064	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
9065#else
9066	/* The register where the function returns its value
9067	   is likely to have something else in it, such as an argument.
9068	   So preserve that register around the call.  */
9069
9070	if (value_mode != VOIDmode)
9071	  {
9072	    rtx valreg = hard_libcall_value (value_mode);
9073	    rtx saved_valreg = gen_reg_rtx (value_mode);
9074
9075	    emit_move_insn (saved_valreg, valreg);
9076	    temp = expand_call (exp, target, ignore);
9077	    emit_move_insn (valreg, saved_valreg);
9078	  }
9079	else
9080	  /* Generate the call, putting the value in a pseudo.  */
9081	  temp = expand_call (exp, target, ignore);
9082#endif
9083
9084	seq = get_insns ();
9085	end_sequence ();
9086
9087	saveregs_value = temp;
9088
9089	/* Put the sequence after the NOTE that starts the function.
9090	   If this is inside a SEQUENCE, make the outer-level insn
9091	   chain current, so the code is placed at the start of the
9092	   function.  */
9093	push_topmost_sequence ();
9094	emit_insns_before (seq, NEXT_INSN (get_insns ()));
9095	pop_topmost_sequence ();
9096	return temp;
9097      }
9098
9099      /* __builtin_args_info (N) returns word N of the arg space info
9100	 for the current function.  The number and meanings of words
9101	 is controlled by the definition of CUMULATIVE_ARGS.  */
9102    case BUILT_IN_ARGS_INFO:
9103      {
9104	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
9105	int *word_ptr = (int *) &current_function_args_info;
9106#if 0
9107	/* These are used by the code below that is if 0'ed away */
9108	int i;
9109	tree type, elts, result;
9110#endif
9111
9112	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9113	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9114		 __FILE__, __LINE__);
9115
9116	if (arglist != 0)
9117	  {
9118	    tree arg = TREE_VALUE (arglist);
9119	    if (TREE_CODE (arg) != INTEGER_CST)
9120	      error ("argument of `__builtin_args_info' must be constant");
9121	    else
9122	      {
9123		int wordnum = TREE_INT_CST_LOW (arg);
9124
9125		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9126		  error ("argument of `__builtin_args_info' out of range");
9127		else
9128		  return GEN_INT (word_ptr[wordnum]);
9129	      }
9130	  }
9131	else
9132	  error ("missing argument in `__builtin_args_info'");
9133
9134	return const0_rtx;
9135
9136#if 0
9137	for (i = 0; i < nwords; i++)
9138	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
9139
9140	type = build_array_type (integer_type_node,
9141				 build_index_type (build_int_2 (nwords, 0)));
9142	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9143	TREE_CONSTANT (result) = 1;
9144	TREE_STATIC (result) = 1;
9145	result = build (INDIRECT_REF, build_pointer_type (type), result);
9146	TREE_CONSTANT (result) = 1;
9147	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9148#endif
9149      }
9150
9151      /* Return the address of the first anonymous stack arg.  */
9152    case BUILT_IN_NEXT_ARG:
9153      {
9154	tree fntype = TREE_TYPE (current_function_decl);
9155
9156	if ((TYPE_ARG_TYPES (fntype) == 0
9157	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9158		 == void_type_node))
9159	    && ! current_function_varargs)
9160	  {
9161	    error ("`va_start' used in function with fixed args");
9162	    return const0_rtx;
9163	  }
9164
9165	if (arglist)
9166	  {
9167	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9168	    tree arg = TREE_VALUE (arglist);
9169
9170	    /* Strip off all nops for the sake of the comparison.  This
9171	       is not quite the same as STRIP_NOPS.  It does more.
9172	       We must also strip off INDIRECT_EXPR for C++ reference
9173	       parameters.  */
9174	    while (TREE_CODE (arg) == NOP_EXPR
9175		   || TREE_CODE (arg) == CONVERT_EXPR
9176		   || TREE_CODE (arg) == NON_LVALUE_EXPR
9177		   || TREE_CODE (arg) == INDIRECT_REF)
9178	      arg = TREE_OPERAND (arg, 0);
9179	    if (arg != last_parm)
9180	      warning ("second parameter of `va_start' not last named argument");
9181	  }
9182	else if (! current_function_varargs)
9183	  /* Evidently an out of date version of <stdarg.h>; can't validate
9184	     va_start's second argument, but can still work as intended.  */
9185	  warning ("`__builtin_next_arg' called without an argument");
9186      }
9187
9188      return expand_binop (Pmode, add_optab,
9189			   current_function_internal_arg_pointer,
9190			   current_function_arg_offset_rtx,
9191			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
9192
9193    case BUILT_IN_CLASSIFY_TYPE:
9194      if (arglist != 0)
9195	{
9196	  tree type = TREE_TYPE (TREE_VALUE (arglist));
9197	  enum tree_code code = TREE_CODE (type);
9198	  if (code == VOID_TYPE)
9199	    return GEN_INT (void_type_class);
9200	  if (code == INTEGER_TYPE)
9201	    return GEN_INT (integer_type_class);
9202	  if (code == CHAR_TYPE)
9203	    return GEN_INT (char_type_class);
9204	  if (code == ENUMERAL_TYPE)
9205	    return GEN_INT (enumeral_type_class);
9206	  if (code == BOOLEAN_TYPE)
9207	    return GEN_INT (boolean_type_class);
9208	  if (code == POINTER_TYPE)
9209	    return GEN_INT (pointer_type_class);
9210	  if (code == REFERENCE_TYPE)
9211	    return GEN_INT (reference_type_class);
9212	  if (code == OFFSET_TYPE)
9213	    return GEN_INT (offset_type_class);
9214	  if (code == REAL_TYPE)
9215	    return GEN_INT (real_type_class);
9216	  if (code == COMPLEX_TYPE)
9217	    return GEN_INT (complex_type_class);
9218	  if (code == FUNCTION_TYPE)
9219	    return GEN_INT (function_type_class);
9220	  if (code == METHOD_TYPE)
9221	    return GEN_INT (method_type_class);
9222	  if (code == RECORD_TYPE)
9223	    return GEN_INT (record_type_class);
9224	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9225	    return GEN_INT (union_type_class);
9226	  if (code == ARRAY_TYPE)
9227	    {
9228	      if (TYPE_STRING_FLAG (type))
9229		return GEN_INT (string_type_class);
9230	      else
9231		return GEN_INT (array_type_class);
9232	    }
9233	  if (code == SET_TYPE)
9234	    return GEN_INT (set_type_class);
9235	  if (code == FILE_TYPE)
9236	    return GEN_INT (file_type_class);
9237	  if (code == LANG_TYPE)
9238	    return GEN_INT (lang_type_class);
9239	}
9240      return GEN_INT (no_type_class);
9241
9242    case BUILT_IN_CONSTANT_P:
9243      if (arglist == 0)
9244	return const0_rtx;
9245      else
9246	{
9247	  tree arg = TREE_VALUE (arglist);
9248	  rtx tmp;
9249
9250	  /* We return 1 for a numeric type that's known to be a constant
9251	     value at compile-time or for an aggregate type that's a
9252	     literal constant.  */
9253	  STRIP_NOPS (arg);
9254
9255	  /* If we know this is a constant, emit the constant of one.  */
9256	  if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9257	      || (TREE_CODE (arg) == CONSTRUCTOR
9258		  && TREE_CONSTANT (arg))
9259	      || (TREE_CODE (arg) == ADDR_EXPR
9260		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9261	    return const1_rtx;
9262
9263	  /* If we aren't going to be running CSE or this expression
9264	     has side effects, show we don't know it to be a constant.
9265	     Likewise if it's a pointer or aggregate type since in those
9266	     case we only want literals, since those are only optimized
9267	     when generating RTL, not later.  */
9268	  if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9269	      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9270	      || POINTER_TYPE_P (TREE_TYPE (arg)))
9271	    return const0_rtx;
9272
9273	  /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9274	     chance to see if it can deduce whether ARG is constant.  */
9275
9276	  tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9277	  tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9278	  return tmp;
9279	}
9280
9281    case BUILT_IN_FRAME_ADDRESS:
9282      /* The argument must be a nonnegative integer constant.
9283	 It counts the number of frames to scan up the stack.
9284	 The value is the address of that frame.  */
9285    case BUILT_IN_RETURN_ADDRESS:
9286      /* The argument must be a nonnegative integer constant.
9287	 It counts the number of frames to scan up the stack.
9288	 The value is the return address saved in that frame.  */
9289      if (arglist == 0)
9290	/* Warning about missing arg was already issued.  */
9291	return const0_rtx;
9292      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9293	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9294	{
9295	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9296	    error ("invalid arg to `__builtin_frame_address'");
9297	  else
9298	    error ("invalid arg to `__builtin_return_address'");
9299	  return const0_rtx;
9300	}
9301      else
9302	{
9303	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9304						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9305						hard_frame_pointer_rtx);
9306
9307	  /* Some ports cannot access arbitrary stack frames.  */
9308	  if (tem == NULL)
9309	    {
9310	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9311		warning ("unsupported arg to `__builtin_frame_address'");
9312	      else
9313		warning ("unsupported arg to `__builtin_return_address'");
9314	      return const0_rtx;
9315	    }
9316
9317	  /* For __builtin_frame_address, return what we've got.  */
9318	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9319	    return tem;
9320
9321	  if (GET_CODE (tem) != REG
9322	      && ! CONSTANT_P (tem))
9323	    tem = copy_to_mode_reg (Pmode, tem);
9324	  return tem;
9325	}
9326
9327    /* Returns the address of the area where the structure is returned.
9328       0 otherwise.  */
9329    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9330      if (arglist != 0
9331          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9332          || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9333        return const0_rtx;
9334      else
9335        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9336
9337    case BUILT_IN_ALLOCA:
9338      if (arglist == 0
9339	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
9340	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9341	break;
9342
9343      /* Compute the argument.  */
9344      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9345
9346      /* Allocate the desired space.  */
9347      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9348
9349    case BUILT_IN_FFS:
9350      /* If not optimizing, call the library function.  */
9351      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9352	break;
9353
9354      if (arglist == 0
9355	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
9356	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9357	break;
9358
9359      /* Compute the argument.  */
9360      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9361      /* Compute ffs, into TARGET if possible.
9362	 Set TARGET to wherever the result comes back.  */
9363      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9364			    ffs_optab, op0, target, 1);
9365      if (target == 0)
9366	abort ();
9367      return target;
9368
9369    case BUILT_IN_STRLEN:
9370      /* If not optimizing, call the library function.  */
9371      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9372	break;
9373
9374      if (arglist == 0
9375	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9376	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9377	break;
9378      else
9379	{
9380	  tree src = TREE_VALUE (arglist);
9381	  tree len = c_strlen (src);
9382
9383	  int align
9384	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9385
9386	  rtx result, src_rtx, char_rtx;
9387	  enum machine_mode insn_mode = value_mode, char_mode;
9388	  enum insn_code icode;
9389
9390	  /* If the length is known, just return it.  */
9391	  if (len != 0)
9392	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9393
9394	  /* If SRC is not a pointer type, don't do this operation inline.  */
9395	  if (align == 0)
9396	    break;
9397
9398	  /* Call a function if we can't compute strlen in the right mode.  */
9399
9400	  while (insn_mode != VOIDmode)
9401	    {
9402	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9403	      if (icode != CODE_FOR_nothing)
9404		break;
9405
9406	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9407	    }
9408	  if (insn_mode == VOIDmode)
9409	    break;
9410
9411	  /* Make a place to write the result of the instruction.  */
9412	  result = target;
9413	  if (! (result != 0
9414		 && GET_CODE (result) == REG
9415		 && GET_MODE (result) == insn_mode
9416		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9417	    result = gen_reg_rtx (insn_mode);
9418
9419	  /* Make sure the operands are acceptable to the predicates.  */
9420
9421	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9422	    result = gen_reg_rtx (insn_mode);
9423	  src_rtx = memory_address (BLKmode,
9424				    expand_expr (src, NULL_RTX, ptr_mode,
9425						 EXPAND_NORMAL));
9426
9427	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9428	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9429
9430	  /* Check the string is readable and has an end.  */
9431	  if (current_function_check_memory_usage)
9432	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9433			       src_rtx, Pmode,
9434			       GEN_INT (MEMORY_USE_RO),
9435			       TYPE_MODE (integer_type_node));
9436
9437	  char_rtx = const0_rtx;
9438	  char_mode = insn_operand_mode[(int)icode][2];
9439	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9440	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9441
9442	  emit_insn (GEN_FCN (icode) (result,
9443				      gen_rtx_MEM (BLKmode, src_rtx),
9444				      char_rtx, GEN_INT (align)));
9445
9446	  /* Return the value in the proper mode for this function.  */
9447	  if (GET_MODE (result) == value_mode)
9448	    return result;
9449	  else if (target != 0)
9450	    {
9451	      convert_move (target, result, 0);
9452	      return target;
9453	    }
9454	  else
9455	    return convert_to_mode (value_mode, result, 0);
9456	}
9457
9458    case BUILT_IN_STRCPY:
9459      /* If not optimizing, call the library function.  */
9460      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9461	break;
9462
9463      if (arglist == 0
9464	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9465	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9466	  || TREE_CHAIN (arglist) == 0
9467	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9468	break;
9469      else
9470	{
9471	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9472
9473	  if (len == 0)
9474	    break;
9475
9476	  len = size_binop (PLUS_EXPR, len, integer_one_node);
9477
9478	  chainon (arglist, build_tree_list (NULL_TREE, len));
9479	}
9480
9481      /* Drops in.  */
9482    case BUILT_IN_MEMCPY:
9483      /* If not optimizing, call the library function.  */
9484      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9485	break;
9486
9487      if (arglist == 0
9488	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9489	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9490	  || TREE_CHAIN (arglist) == 0
9491	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9492	      != POINTER_TYPE)
9493	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9494	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
9495				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
9496	      != INTEGER_TYPE))
9497	break;
9498      else
9499	{
9500	  tree dest = TREE_VALUE (arglist);
9501	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
9502	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9503
9504	  int src_align
9505	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9506	  int dest_align
9507	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9508	  rtx dest_mem, src_mem, dest_addr, len_rtx;
9509
9510	  /* If either SRC or DEST is not a pointer type, don't do
9511	     this operation in-line.  */
9512	  if (src_align == 0 || dest_align == 0)
9513	    {
9514	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9515		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9516	      break;
9517	    }
9518
9519	  dest_mem = get_memory_rtx (dest);
9520	  src_mem = get_memory_rtx (src);
9521	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9522
9523	  /* Just copy the rights of SRC to the rights of DEST.  */
9524	  if (current_function_check_memory_usage)
9525	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9526			       XEXP (dest_mem, 0), Pmode,
9527			       XEXP (src_mem, 0), Pmode,
9528			       len_rtx, TYPE_MODE (sizetype));
9529
9530	  /* Copy word part most expediently.  */
9531	  dest_addr
9532	    = emit_block_move (dest_mem, src_mem, len_rtx,
9533			       MIN (src_align, dest_align));
9534
9535	  if (dest_addr == 0)
9536	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9537
9538	  return dest_addr;
9539	}
9540
9541    case BUILT_IN_MEMSET:
9542      /* If not optimizing, call the library function.  */
9543      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9544	break;
9545
9546      if (arglist == 0
9547	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9548	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9549	  || TREE_CHAIN (arglist) == 0
9550	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9551	      != INTEGER_TYPE)
9552	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9553	  || (INTEGER_TYPE
9554	      != (TREE_CODE (TREE_TYPE
9555			     (TREE_VALUE
9556			      (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9557	break;
9558      else
9559	{
9560	  tree dest = TREE_VALUE (arglist);
9561	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
9562	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9563
9564	  int dest_align
9565	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9566	  rtx dest_mem, dest_addr, len_rtx;
9567
9568	  /* If DEST is not a pointer type, don't do this
9569	     operation in-line.  */
9570	  if (dest_align == 0)
9571	    break;
9572
9573	  /* If the arguments have side-effects, then we can only evaluate
9574	     them at most once.  The following code evaluates them twice if
9575	     they are not constants because we break out to expand_call
9576	     in that case.  They can't be constants if they have side-effects
9577	     so we can check for that first.  Alternatively, we could call
9578	     save_expr to make multiple evaluation safe.  */
9579	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9580	    break;
9581
9582	  /* If VAL is not 0, don't do this operation in-line. */
9583	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9584	    break;
9585
9586	  /* If LEN does not expand to a constant, don't do this
9587	     operation in-line.  */
9588	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9589	  if (GET_CODE (len_rtx) != CONST_INT)
9590	    break;
9591
9592	  dest_mem = get_memory_rtx (dest);
9593
9594	  /* Just check DST is writable and mark it as readable.  */
9595	  if (current_function_check_memory_usage)
9596	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9597			       XEXP (dest_mem, 0), Pmode,
9598			       len_rtx, TYPE_MODE (sizetype),
9599			       GEN_INT (MEMORY_USE_WO),
9600			       TYPE_MODE (integer_type_node));
9601
9602
9603	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9604
9605	  if (dest_addr == 0)
9606	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9607
9608	  return dest_addr;
9609	}
9610
9611/* These comparison functions need an instruction that returns an actual
9612   index.  An ordinary compare that just sets the condition codes
9613   is not enough.  */
9614#ifdef HAVE_cmpstrsi
9615    case BUILT_IN_STRCMP:
9616      /* If not optimizing, call the library function.  */
9617      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9618	break;
9619
9620      /* If we need to check memory accesses, call the library function.  */
9621      if (current_function_check_memory_usage)
9622	break;
9623
9624      if (arglist == 0
9625	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9626	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9627	  || TREE_CHAIN (arglist) == 0
9628	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9629	break;
9630      else if (!HAVE_cmpstrsi)
9631	break;
9632      {
9633	tree arg1 = TREE_VALUE (arglist);
9634	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9635	tree len, len2;
9636
9637	len = c_strlen (arg1);
9638	if (len)
9639	  len = size_binop (PLUS_EXPR, integer_one_node, len);
9640	len2 = c_strlen (arg2);
9641	if (len2)
9642	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9643
9644	/* If we don't have a constant length for the first, use the length
9645	   of the second, if we know it.  We don't require a constant for
9646	   this case; some cost analysis could be done if both are available
9647	   but neither is constant.  For now, assume they're equally cheap.
9648
9649	   If both strings have constant lengths, use the smaller.  This
9650	   could arise if optimization results in strcpy being called with
9651	   two fixed strings, or if the code was machine-generated.  We should
9652	   add some code to the `memcmp' handler below to deal with such
9653	   situations, someday.  */
9654	if (!len || TREE_CODE (len) != INTEGER_CST)
9655	  {
9656	    if (len2)
9657	      len = len2;
9658	    else if (len == 0)
9659	      break;
9660	  }
9661	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9662	  {
9663	    if (tree_int_cst_lt (len2, len))
9664	      len = len2;
9665	  }
9666
9667	chainon (arglist, build_tree_list (NULL_TREE, len));
9668      }
9669
9670      /* Drops in.  */
9671    case BUILT_IN_MEMCMP:
9672      /* If not optimizing, call the library function.  */
9673      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9674	break;
9675
9676      /* If we need to check memory accesses, call the library function.  */
9677      if (current_function_check_memory_usage)
9678	break;
9679
9680      if (arglist == 0
9681	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
9682	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9683	  || TREE_CHAIN (arglist) == 0
9684	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9685	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9686	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9687	break;
9688      else if (!HAVE_cmpstrsi)
9689	break;
9690      {
9691	tree arg1 = TREE_VALUE (arglist);
9692	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9693	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9694	rtx result;
9695
9696	int arg1_align
9697	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9698	int arg2_align
9699	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9700	enum machine_mode insn_mode
9701	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9702
9703	/* If we don't have POINTER_TYPE, call the function.  */
9704	if (arg1_align == 0 || arg2_align == 0)
9705	  {
9706	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9707	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9708	    break;
9709	  }
9710
9711	/* Make a place to write the result of the instruction.  */
9712	result = target;
9713	if (! (result != 0
9714	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9715	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9716	  result = gen_reg_rtx (insn_mode);
9717
9718	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9719				 get_memory_rtx (arg2),
9720				 expand_expr (len, NULL_RTX, VOIDmode, 0),
9721				 GEN_INT (MIN (arg1_align, arg2_align))));
9722
9723	/* Return the value in the proper mode for this function.  */
9724	mode = TYPE_MODE (TREE_TYPE (exp));
9725	if (GET_MODE (result) == mode)
9726	  return result;
9727	else if (target != 0)
9728	  {
9729	    convert_move (target, result, 0);
9730	    return target;
9731	  }
9732	else
9733	  return convert_to_mode (mode, result, 0);
9734      }
9735#else
9736    case BUILT_IN_STRCMP:
9737    case BUILT_IN_MEMCMP:
9738      break;
9739#endif
9740
9741    case BUILT_IN_SETJMP:
9742      target = expand_builtin_setjmp (arglist, target);
9743      if (target)
9744	return target;
9745      break;
9746
9747      /* __builtin_longjmp is passed a pointer to an array of five words.
9748	 It's similar to the C library longjmp function but works with
9749	 __builtin_setjmp above.  */
9750    case BUILT_IN_LONGJMP:
9751      if (arglist == 0 || TREE_CHAIN (arglist) == 0
9752	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9753	break;
9754      else
9755	{
9756	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9757				      VOIDmode, 0);
9758	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9759				   NULL_RTX, VOIDmode, 0);
9760
9761	  if (value != const1_rtx)
9762	    {
9763	      error ("__builtin_longjmp second argument must be 1");
9764	      return const0_rtx;
9765	    }
9766
9767	  expand_builtin_longjmp (buf_addr, value);
9768	  return const0_rtx;
9769	}
9770
9771    case BUILT_IN_TRAP:
9772#ifdef HAVE_trap
9773      if (HAVE_trap)
9774	emit_insn (gen_trap ());
9775      else
9776#endif
9777	error ("__builtin_trap not supported by this target");
9778      emit_barrier ();
9779      return const0_rtx;
9780
9781      /* Various hooks for the DWARF 2 __throw routine.  */
9782    case BUILT_IN_UNWIND_INIT:
9783      expand_builtin_unwind_init ();
9784      return const0_rtx;
9785    case BUILT_IN_DWARF_CFA:
9786      return virtual_cfa_rtx;
9787#ifdef DWARF2_UNWIND_INFO
9788    case BUILT_IN_DWARF_FP_REGNUM:
9789      return expand_builtin_dwarf_fp_regnum ();
9790    case BUILT_IN_DWARF_REG_SIZE:
9791      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9792#endif
9793    case BUILT_IN_FROB_RETURN_ADDR:
9794      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9795    case BUILT_IN_EXTRACT_RETURN_ADDR:
9796      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9797    case BUILT_IN_EH_RETURN:
9798      expand_builtin_eh_return (TREE_VALUE (arglist),
9799				TREE_VALUE (TREE_CHAIN (arglist)),
9800				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9801      return const0_rtx;
9802
9803    default:			/* just do library call, if unknown builtin */
9804      error ("built-in function `%s' not currently supported",
9805	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9806    }
9807
9808  /* The switch statement above can drop through to cause the function
9809     to be called normally.  */
9810
9811  return expand_call (exp, target, ignore);
9812}
9813
9814/* Built-in functions to perform an untyped call and return.  */
9815
9816/* For each register that may be used for calling a function, this
9817   gives a mode used to copy the register's value.  VOIDmode indicates
9818   the register is not used for calling a function.  If the machine
9819   has register windows, this gives only the outbound registers.
9820   INCOMING_REGNO gives the corresponding inbound register.  */
9821static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9822
9823/* For each register that may be used for returning values, this gives
9824   a mode used to copy the register's value.  VOIDmode indicates the
9825   register is not used for returning values.  If the machine has
9826   register windows, this gives only the outbound registers.
9827   INCOMING_REGNO gives the corresponding inbound register.  */
9828static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9829
9830/* For each register that may be used for calling a function, this
9831   gives the offset of that register into the block returned by
9832   __builtin_apply_args.  0 indicates that the register is not
9833   used for calling a function.  */
9834static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9835
9836/* Return the offset of register REGNO into the block returned by
9837   __builtin_apply_args.  This is not declared static, since it is
9838   needed in objc-act.c.  */
9839
9840int
9841apply_args_register_offset (regno)
9842     int regno;
9843{
9844  apply_args_size ();
9845
9846  /* Arguments are always put in outgoing registers (in the argument
9847     block) if such make sense.  */
9848#ifdef OUTGOING_REGNO
9849  regno = OUTGOING_REGNO(regno);
9850#endif
9851  return apply_args_reg_offset[regno];
9852}
9853
9854/* Return the size required for the block returned by __builtin_apply_args,
9855   and initialize apply_args_mode.  */
9856
9857static int
9858apply_args_size ()
9859{
9860  static int size = -1;
9861  int align, regno;
9862  enum machine_mode mode;
9863
9864  /* The values computed by this function never change.  */
9865  if (size < 0)
9866    {
9867      /* The first value is the incoming arg-pointer.  */
9868      size = GET_MODE_SIZE (Pmode);
9869
9870      /* The second value is the structure value address unless this is
9871	 passed as an "invisible" first argument.  */
9872      if (struct_value_rtx)
9873	size += GET_MODE_SIZE (Pmode);
9874
9875      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9876	if (FUNCTION_ARG_REGNO_P (regno))
9877	  {
9878	    /* Search for the proper mode for copying this register's
9879	       value.  I'm not sure this is right, but it works so far.  */
9880	    enum machine_mode best_mode = VOIDmode;
9881
9882	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9883		 mode != VOIDmode;
9884		 mode = GET_MODE_WIDER_MODE (mode))
9885	      if (HARD_REGNO_MODE_OK (regno, mode)
9886		  && HARD_REGNO_NREGS (regno, mode) == 1)
9887		best_mode = mode;
9888
9889	    if (best_mode == VOIDmode)
9890	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9891		   mode != VOIDmode;
9892		   mode = GET_MODE_WIDER_MODE (mode))
9893		if (HARD_REGNO_MODE_OK (regno, mode)
9894		    && (mov_optab->handlers[(int) mode].insn_code
9895			!= CODE_FOR_nothing))
9896		  best_mode = mode;
9897
9898	    mode = best_mode;
9899	    if (mode == VOIDmode)
9900	      abort ();
9901
9902	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9903	    if (size % align != 0)
9904	      size = CEIL (size, align) * align;
9905	    apply_args_reg_offset[regno] = size;
9906	    size += GET_MODE_SIZE (mode);
9907	    apply_args_mode[regno] = mode;
9908	  }
9909	else
9910	  {
9911	    apply_args_mode[regno] = VOIDmode;
9912	    apply_args_reg_offset[regno] = 0;
9913	  }
9914    }
9915  return size;
9916}
9917
9918/* Return the size required for the block returned by __builtin_apply,
9919   and initialize apply_result_mode.  */
9920
9921static int
9922apply_result_size ()
9923{
9924  static int size = -1;
9925  int align, regno;
9926  enum machine_mode mode;
9927
9928  /* The values computed by this function never change.  */
9929  if (size < 0)
9930    {
9931      size = 0;
9932
9933      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9934	if (FUNCTION_VALUE_REGNO_P (regno))
9935	  {
9936	    /* Search for the proper mode for copying this register's
9937	       value.  I'm not sure this is right, but it works so far.  */
9938	    enum machine_mode best_mode = VOIDmode;
9939
9940	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9941		 mode != TImode;
9942		 mode = GET_MODE_WIDER_MODE (mode))
9943	      if (HARD_REGNO_MODE_OK (regno, mode))
9944		best_mode = mode;
9945
9946	    if (best_mode == VOIDmode)
9947	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9948		   mode != VOIDmode;
9949		   mode = GET_MODE_WIDER_MODE (mode))
9950		if (HARD_REGNO_MODE_OK (regno, mode)
9951		    && (mov_optab->handlers[(int) mode].insn_code
9952			!= CODE_FOR_nothing))
9953		  best_mode = mode;
9954
9955	    mode = best_mode;
9956	    if (mode == VOIDmode)
9957	      abort ();
9958
9959	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9960	    if (size % align != 0)
9961	      size = CEIL (size, align) * align;
9962	    size += GET_MODE_SIZE (mode);
9963	    apply_result_mode[regno] = mode;
9964	  }
9965	else
9966	  apply_result_mode[regno] = VOIDmode;
9967
9968      /* Allow targets that use untyped_call and untyped_return to override
9969	 the size so that machine-specific information can be stored here.  */
9970#ifdef APPLY_RESULT_SIZE
9971      size = APPLY_RESULT_SIZE;
9972#endif
9973    }
9974  return size;
9975}
9976
9977#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9978/* Create a vector describing the result block RESULT.  If SAVEP is true,
9979   the result block is used to save the values; otherwise it is used to
9980   restore the values.  */
9981
9982static rtx
9983result_vector (savep, result)
9984     int savep;
9985     rtx result;
9986{
9987  int regno, size, align, nelts;
9988  enum machine_mode mode;
9989  rtx reg, mem;
9990  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9991
9992  size = nelts = 0;
9993  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9994    if ((mode = apply_result_mode[regno]) != VOIDmode)
9995      {
9996	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9997	if (size % align != 0)
9998	  size = CEIL (size, align) * align;
9999	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
10000	mem = change_address (result, mode,
10001			      plus_constant (XEXP (result, 0), size));
10002	savevec[nelts++] = (savep
10003			    ? gen_rtx_SET (VOIDmode, mem, reg)
10004			    : gen_rtx_SET (VOIDmode, reg, mem));
10005	size += GET_MODE_SIZE (mode);
10006      }
10007  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
10008}
10009#endif /* HAVE_untyped_call or HAVE_untyped_return */
10010
10011/* Save the state required to perform an untyped call with the same
10012   arguments as were passed to the current function.  */
10013
10014static rtx
10015expand_builtin_apply_args ()
10016{
10017  rtx registers;
10018  int size, align, regno;
10019  enum machine_mode mode;
10020
10021  /* Create a block where the arg-pointer, structure value address,
10022     and argument registers can be saved.  */
10023  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
10024
10025  /* Walk past the arg-pointer and structure value address.  */
10026  size = GET_MODE_SIZE (Pmode);
10027  if (struct_value_rtx)
10028    size += GET_MODE_SIZE (Pmode);
10029
10030  /* Save each register used in calling a function to the block.  */
10031  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10032    if ((mode = apply_args_mode[regno]) != VOIDmode)
10033      {
10034	rtx tem;
10035
10036	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10037	if (size % align != 0)
10038	  size = CEIL (size, align) * align;
10039
10040	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10041
10042#ifdef STACK_REGS
10043        /* For reg-stack.c's stack register household.
10044	   Compare with a similar piece of code in function.c.  */
10045
10046        emit_insn (gen_rtx_USE (mode, tem));
10047#endif
10048
10049	emit_move_insn (change_address (registers, mode,
10050					plus_constant (XEXP (registers, 0),
10051						       size)),
10052			tem);
10053	size += GET_MODE_SIZE (mode);
10054      }
10055
10056  /* Save the arg pointer to the block.  */
10057  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
10058		  copy_to_reg (virtual_incoming_args_rtx));
10059  size = GET_MODE_SIZE (Pmode);
10060
10061  /* Save the structure value address unless this is passed as an
10062     "invisible" first argument.  */
10063  if (struct_value_incoming_rtx)
10064    {
10065      emit_move_insn (change_address (registers, Pmode,
10066				      plus_constant (XEXP (registers, 0),
10067						     size)),
10068		      copy_to_reg (struct_value_incoming_rtx));
10069      size += GET_MODE_SIZE (Pmode);
10070    }
10071
10072  /* Return the address of the block.  */
10073  return copy_addr_to_reg (XEXP (registers, 0));
10074}
10075
10076/* Perform an untyped call and save the state required to perform an
10077   untyped return of whatever value was returned by the given function.  */
10078
10079static rtx
10080expand_builtin_apply (function, arguments, argsize)
10081     rtx function, arguments, argsize;
10082{
10083  int size, align, regno;
10084  enum machine_mode mode;
10085  rtx incoming_args, result, reg, dest, call_insn;
10086  rtx old_stack_level = 0;
10087  rtx call_fusage = 0;
10088
10089  /* Create a block where the return registers can be saved.  */
10090  result = assign_stack_local (BLKmode, apply_result_size (), -1);
10091
10092  /* ??? The argsize value should be adjusted here.  */
10093
10094  /* Fetch the arg pointer from the ARGUMENTS block.  */
10095  incoming_args = gen_reg_rtx (Pmode);
10096  emit_move_insn (incoming_args,
10097		  gen_rtx_MEM (Pmode, arguments));
10098#ifndef STACK_GROWS_DOWNWARD
10099  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10100				incoming_args, 0, OPTAB_LIB_WIDEN);
10101#endif
10102
10103  /* Perform postincrements before actually calling the function.  */
10104  emit_queue ();
10105
10106  /* Push a new argument block and copy the arguments.  */
10107  do_pending_stack_adjust ();
10108
10109  /* Save the stack with nonlocal if available */
10110#ifdef HAVE_save_stack_nonlocal
10111  if (HAVE_save_stack_nonlocal)
10112    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10113  else
10114#endif
10115    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
10116
10117  /* Push a block of memory onto the stack to store the memory arguments.
10118     Save the address in a register, and copy the memory arguments.  ??? I
10119     haven't figured out how the calling convention macros effect this,
10120     but it's likely that the source and/or destination addresses in
10121     the block copy will need updating in machine specific ways.  */
10122  dest = allocate_dynamic_stack_space (argsize, 0, 0);
10123  emit_block_move (gen_rtx_MEM (BLKmode, dest),
10124		   gen_rtx_MEM (BLKmode, incoming_args),
10125		   argsize,
10126		   PARM_BOUNDARY / BITS_PER_UNIT);
10127
10128  /* Refer to the argument block.  */
10129  apply_args_size ();
10130  arguments = gen_rtx_MEM (BLKmode, arguments);
10131
10132  /* Walk past the arg-pointer and structure value address.  */
10133  size = GET_MODE_SIZE (Pmode);
10134  if (struct_value_rtx)
10135    size += GET_MODE_SIZE (Pmode);
10136
10137  /* Restore each of the registers previously saved.  Make USE insns
10138     for each of these registers for use in making the call.  */
10139  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10140    if ((mode = apply_args_mode[regno]) != VOIDmode)
10141      {
10142	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10143	if (size % align != 0)
10144	  size = CEIL (size, align) * align;
10145	reg = gen_rtx_REG (mode, regno);
10146	emit_move_insn (reg,
10147			change_address (arguments, mode,
10148					plus_constant (XEXP (arguments, 0),
10149						       size)));
10150
10151	use_reg (&call_fusage, reg);
10152	size += GET_MODE_SIZE (mode);
10153      }
10154
10155  /* Restore the structure value address unless this is passed as an
10156     "invisible" first argument.  */
10157  size = GET_MODE_SIZE (Pmode);
10158  if (struct_value_rtx)
10159    {
10160      rtx value = gen_reg_rtx (Pmode);
10161      emit_move_insn (value,
10162		      change_address (arguments, Pmode,
10163				      plus_constant (XEXP (arguments, 0),
10164						     size)));
10165      emit_move_insn (struct_value_rtx, value);
10166      if (GET_CODE (struct_value_rtx) == REG)
10167	  use_reg (&call_fusage, struct_value_rtx);
10168      size += GET_MODE_SIZE (Pmode);
10169    }
10170
10171  /* All arguments and registers used for the call are set up by now!  */
10172  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10173
10174  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
10175     and we don't want to load it into a register as an optimization,
10176     because prepare_call_address already did it if it should be done.  */
10177  if (GET_CODE (function) != SYMBOL_REF)
10178    function = memory_address (FUNCTION_MODE, function);
10179
10180  /* Generate the actual call instruction and save the return value.  */
10181#ifdef HAVE_untyped_call
10182  if (HAVE_untyped_call)
10183    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10184				      result, result_vector (1, result)));
10185  else
10186#endif
10187#ifdef HAVE_call_value
10188  if (HAVE_call_value)
10189    {
10190      rtx valreg = 0;
10191
10192      /* Locate the unique return register.  It is not possible to
10193	 express a call that sets more than one return register using
10194	 call_value; use untyped_call for that.  In fact, untyped_call
10195	 only needs to save the return registers in the given block.  */
10196      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10197	if ((mode = apply_result_mode[regno]) != VOIDmode)
10198	  {
10199	    if (valreg)
10200	      abort (); /* HAVE_untyped_call required.  */
10201	    valreg = gen_rtx_REG (mode, regno);
10202	  }
10203
10204      emit_call_insn (gen_call_value (valreg,
10205				      gen_rtx_MEM (FUNCTION_MODE, function),
10206				      const0_rtx, NULL_RTX, const0_rtx));
10207
10208      emit_move_insn (change_address (result, GET_MODE (valreg),
10209				      XEXP (result, 0)),
10210		      valreg);
10211    }
10212  else
10213#endif
10214    abort ();
10215
10216  /* Find the CALL insn we just emitted.  */
10217  for (call_insn = get_last_insn ();
10218       call_insn && GET_CODE (call_insn) != CALL_INSN;
10219       call_insn = PREV_INSN (call_insn))
10220    ;
10221
10222  if (! call_insn)
10223    abort ();
10224
10225  /* Put the register usage information on the CALL.  If there is already
10226     some usage information, put ours at the end.  */
10227  if (CALL_INSN_FUNCTION_USAGE (call_insn))
10228    {
10229      rtx link;
10230
10231      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10232	   link = XEXP (link, 1))
10233	;
10234
10235      XEXP (link, 1) = call_fusage;
10236    }
10237  else
10238    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10239
10240  /* Restore the stack.  */
10241#ifdef HAVE_save_stack_nonlocal
10242  if (HAVE_save_stack_nonlocal)
10243    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10244  else
10245#endif
10246    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10247
10248  /* Return the address of the result block.  */
10249  return copy_addr_to_reg (XEXP (result, 0));
10250}
10251
10252/* Perform an untyped return.  */
10253
10254static void
10255expand_builtin_return (result)
10256     rtx result;
10257{
10258  int size, align, regno;
10259  enum machine_mode mode;
10260  rtx reg;
10261  rtx call_fusage = 0;
10262
10263  apply_result_size ();
10264  result = gen_rtx_MEM (BLKmode, result);
10265
10266#ifdef HAVE_untyped_return
10267  if (HAVE_untyped_return)
10268    {
10269      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10270      emit_barrier ();
10271      return;
10272    }
10273#endif
10274
10275  /* Restore the return value and note that each value is used.  */
10276  size = 0;
10277  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10278    if ((mode = apply_result_mode[regno]) != VOIDmode)
10279      {
10280	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10281	if (size % align != 0)
10282	  size = CEIL (size, align) * align;
10283	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10284	emit_move_insn (reg,
10285			change_address (result, mode,
10286					plus_constant (XEXP (result, 0),
10287						       size)));
10288
10289	push_to_sequence (call_fusage);
10290	emit_insn (gen_rtx_USE (VOIDmode, reg));
10291	call_fusage = get_insns ();
10292	end_sequence ();
10293	size += GET_MODE_SIZE (mode);
10294      }
10295
10296  /* Put the USE insns before the return.  */
10297  emit_insns (call_fusage);
10298
10299  /* Return whatever values was restored by jumping directly to the end
10300     of the function.  */
10301  expand_null_return ();
10302}
10303
10304/* Expand code for a post- or pre- increment or decrement
10305   and return the RTX for the result.
10306   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
10307
10308static rtx
10309expand_increment (exp, post, ignore)
10310     register tree exp;
10311     int post, ignore;
10312{
10313  register rtx op0, op1;
10314  register rtx temp, value;
10315  register tree incremented = TREE_OPERAND (exp, 0);
10316  optab this_optab = add_optab;
10317  int icode;
10318  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10319  int op0_is_copy = 0;
10320  int single_insn = 0;
10321  /* 1 means we can't store into OP0 directly,
10322     because it is a subreg narrower than a word,
10323     and we don't dare clobber the rest of the word.  */
10324  int bad_subreg = 0;
10325
10326  /* Stabilize any component ref that might need to be
10327     evaluated more than once below.  */
10328  if (!post
10329      || TREE_CODE (incremented) == BIT_FIELD_REF
10330      || (TREE_CODE (incremented) == COMPONENT_REF
10331	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10332	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10333    incremented = stabilize_reference (incremented);
10334  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
10335     ones into save exprs so that they don't accidentally get evaluated
10336     more than once by the code below.  */
10337  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10338      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10339    incremented = save_expr (incremented);
10340
10341  /* Compute the operands as RTX.
10342     Note whether OP0 is the actual lvalue or a copy of it:
10343     I believe it is a copy iff it is a register or subreg
10344     and insns were generated in computing it.   */
10345
10346  temp = get_last_insn ();
10347  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10348
10349  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10350     in place but instead must do sign- or zero-extension during assignment,
10351     so we copy it into a new register and let the code below use it as
10352     a copy.
10353
10354     Note that we can safely modify this SUBREG since it is know not to be
10355     shared (it was made by the expand_expr call above).  */
10356
10357  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10358    {
10359      if (post)
10360	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10361      else
10362	bad_subreg = 1;
10363    }
10364  else if (GET_CODE (op0) == SUBREG
10365	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10366    {
10367      /* We cannot increment this SUBREG in place.  If we are
10368	 post-incrementing, get a copy of the old value.  Otherwise,
10369	 just mark that we cannot increment in place.  */
10370      if (post)
10371	op0 = copy_to_reg (op0);
10372      else
10373	bad_subreg = 1;
10374    }
10375
10376  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10377		 && temp != get_last_insn ());
10378  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10379		     EXPAND_MEMORY_USE_BAD);
10380
10381  /* Decide whether incrementing or decrementing.  */
10382  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10383      || TREE_CODE (exp) == PREDECREMENT_EXPR)
10384    this_optab = sub_optab;
10385
10386  /* Convert decrement by a constant into a negative increment.  */
10387  if (this_optab == sub_optab
10388      && GET_CODE (op1) == CONST_INT)
10389    {
10390      op1 = GEN_INT (- INTVAL (op1));
10391      this_optab = add_optab;
10392    }
10393
10394  /* For a preincrement, see if we can do this with a single instruction.  */
10395  if (!post)
10396    {
10397      icode = (int) this_optab->handlers[(int) mode].insn_code;
10398      if (icode != (int) CODE_FOR_nothing
10399	  /* Make sure that OP0 is valid for operands 0 and 1
10400	     of the insn we want to queue.  */
10401	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10402	  && (*insn_operand_predicate[icode][1]) (op0, mode)
10403	  && (*insn_operand_predicate[icode][2]) (op1, mode))
10404	single_insn = 1;
10405    }
10406
10407  /* If OP0 is not the actual lvalue, but rather a copy in a register,
10408     then we cannot just increment OP0.  We must therefore contrive to
10409     increment the original value.  Then, for postincrement, we can return
10410     OP0 since it is a copy of the old value.  For preincrement, expand here
10411     unless we can do it with a single insn.
10412
10413     Likewise if storing directly into OP0 would clobber high bits
10414     we need to preserve (bad_subreg).  */
10415  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10416    {
10417      /* This is the easiest way to increment the value wherever it is.
10418	 Problems with multiple evaluation of INCREMENTED are prevented
10419	 because either (1) it is a component_ref or preincrement,
10420	 in which case it was stabilized above, or (2) it is an array_ref
10421	 with constant index in an array in a register, which is
10422	 safe to reevaluate.  */
10423      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10424			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
10425			    ? MINUS_EXPR : PLUS_EXPR),
10426			   TREE_TYPE (exp),
10427			   incremented,
10428			   TREE_OPERAND (exp, 1));
10429
10430      while (TREE_CODE (incremented) == NOP_EXPR
10431	     || TREE_CODE (incremented) == CONVERT_EXPR)
10432	{
10433	  newexp = convert (TREE_TYPE (incremented), newexp);
10434	  incremented = TREE_OPERAND (incremented, 0);
10435	}
10436
10437      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10438      return post ? op0 : temp;
10439    }
10440
10441  if (post)
10442    {
10443      /* We have a true reference to the value in OP0.
10444	 If there is an insn to add or subtract in this mode, queue it.
10445	 Queueing the increment insn avoids the register shuffling
10446	 that often results if we must increment now and first save
10447	 the old value for subsequent use.  */
10448
10449#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
10450      op0 = stabilize (op0);
10451#endif
10452
10453      icode = (int) this_optab->handlers[(int) mode].insn_code;
10454      if (icode != (int) CODE_FOR_nothing
10455	  /* Make sure that OP0 is valid for operands 0 and 1
10456	     of the insn we want to queue.  */
10457	  && (*insn_operand_predicate[icode][0]) (op0, mode)
10458	  && (*insn_operand_predicate[icode][1]) (op0, mode))
10459	{
10460	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10461	    op1 = force_reg (mode, op1);
10462
10463	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10464	}
10465      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10466	{
10467	  rtx addr = (general_operand (XEXP (op0, 0), mode)
10468		      ? force_reg (Pmode, XEXP (op0, 0))
10469		      : copy_to_reg (XEXP (op0, 0)));
10470	  rtx temp, result;
10471
10472	  op0 = change_address (op0, VOIDmode, addr);
10473	  temp = force_reg (GET_MODE (op0), op0);
10474	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10475	    op1 = force_reg (mode, op1);
10476
10477	  /* The increment queue is LIFO, thus we have to `queue'
10478	     the instructions in reverse order.  */
10479	  enqueue_insn (op0, gen_move_insn (op0, temp));
10480	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10481	  return result;
10482	}
10483    }
10484
10485  /* Preincrement, or we can't increment with one simple insn.  */
10486  if (post)
10487    /* Save a copy of the value before inc or dec, to return it later.  */
10488    temp = value = copy_to_reg (op0);
10489  else
10490    /* Arrange to return the incremented value.  */
10491    /* Copy the rtx because expand_binop will protect from the queue,
10492       and the results of that would be invalid for us to return
10493       if our caller does emit_queue before using our result.  */
10494    temp = copy_rtx (value = op0);
10495
10496  /* Increment however we can.  */
10497  op1 = expand_binop (mode, this_optab, value, op1,
10498  		      current_function_check_memory_usage ? NULL_RTX : op0,
10499		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10500  /* Make sure the value is stored into OP0.  */
10501  if (op1 != op0)
10502    emit_move_insn (op0, op1);
10503
10504  return temp;
10505}
10506
10507/* Expand all function calls contained within EXP, innermost ones first.
10508   But don't look within expressions that have sequence points.
10509   For each CALL_EXPR, record the rtx for its value
10510   in the CALL_EXPR_RTL field.  */
10511
10512static void
10513preexpand_calls (exp)
10514     tree exp;
10515{
10516  register int nops, i;
10517  int type = TREE_CODE_CLASS (TREE_CODE (exp));
10518
10519  if (! do_preexpand_calls)
10520    return;
10521
10522  /* Only expressions and references can contain calls.  */
10523
10524  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10525    return;
10526
10527  switch (TREE_CODE (exp))
10528    {
10529    case CALL_EXPR:
10530      /* Do nothing if already expanded.  */
10531      if (CALL_EXPR_RTL (exp) != 0
10532	  /* Do nothing if the call returns a variable-sized object.  */
10533	  || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10534	  /* Do nothing to built-in functions.  */
10535	  || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10536	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10537		  == FUNCTION_DECL)
10538	      && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10539	return;
10540
10541      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10542      return;
10543
10544    case COMPOUND_EXPR:
10545    case COND_EXPR:
10546    case TRUTH_ANDIF_EXPR:
10547    case TRUTH_ORIF_EXPR:
10548      /* If we find one of these, then we can be sure
10549	 the adjust will be done for it (since it makes jumps).
10550	 Do it now, so that if this is inside an argument
10551	 of a function, we don't get the stack adjustment
10552	 after some other args have already been pushed.  */
10553      do_pending_stack_adjust ();
10554      return;
10555
10556    case BLOCK:
10557    case RTL_EXPR:
10558    case WITH_CLEANUP_EXPR:
10559    case CLEANUP_POINT_EXPR:
10560    case TRY_CATCH_EXPR:
10561      return;
10562
10563    case SAVE_EXPR:
10564      if (SAVE_EXPR_RTL (exp) != 0)
10565	return;
10566
10567    default:
10568      break;
10569    }
10570
10571  nops = tree_code_length[(int) TREE_CODE (exp)];
10572  for (i = 0; i < nops; i++)
10573    if (TREE_OPERAND (exp, i) != 0)
10574      {
10575	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10576	if (type == 'e' || type == '<' || type == '1' || type == '2'
10577	    || type == 'r')
10578	  preexpand_calls (TREE_OPERAND (exp, i));
10579      }
10580}
10581
10582/* At the start of a function, record that we have no previously-pushed
10583   arguments waiting to be popped.  */
10584
10585void
10586init_pending_stack_adjust ()
10587{
10588  pending_stack_adjust = 0;
10589}
10590
10591/* When exiting from function, if safe, clear out any pending stack adjust
10592   so the adjustment won't get done.
10593
10594   Note, if the current function calls alloca, then it must have a
10595   frame pointer regardless of the value of flag_omit_frame_pointer.  */
10596
10597void
10598clear_pending_stack_adjust ()
10599{
10600#ifdef EXIT_IGNORE_STACK
10601  if (optimize > 0
10602      && (! flag_omit_frame_pointer || current_function_calls_alloca)
10603      && EXIT_IGNORE_STACK
10604      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10605      && ! flag_inline_functions)
10606    pending_stack_adjust = 0;
10607#endif
10608}
10609
10610/* Pop any previously-pushed arguments that have not been popped yet.  */
10611
10612void
10613do_pending_stack_adjust ()
10614{
10615  if (inhibit_defer_pop == 0)
10616    {
10617      if (pending_stack_adjust != 0)
10618	adjust_stack (GEN_INT (pending_stack_adjust));
10619      pending_stack_adjust = 0;
10620    }
10621}
10622
10623/* Expand conditional expressions.  */
10624
10625/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10626   LABEL is an rtx of code CODE_LABEL, in this function and all the
10627   functions here.  */
10628
10629void
10630jumpifnot (exp, label)
10631     tree exp;
10632     rtx label;
10633{
10634  do_jump (exp, label, NULL_RTX);
10635}
10636
10637/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
10638
10639void
10640jumpif (exp, label)
10641     tree exp;
10642     rtx label;
10643{
10644  do_jump (exp, NULL_RTX, label);
10645}
10646
10647/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10648   the result is zero, or IF_TRUE_LABEL if the result is one.
10649   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10650   meaning fall through in that case.
10651
10652   do_jump always does any pending stack adjust except when it does not
10653   actually perform a jump.  An example where there is no jump
10654   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10655
10656   This function is responsible for optimizing cases such as
10657   &&, || and comparison operators in EXP.  */
10658
10659void
10660do_jump (exp, if_false_label, if_true_label)
10661     tree exp;
10662     rtx if_false_label, if_true_label;
10663{
10664  register enum tree_code code = TREE_CODE (exp);
10665  /* Some cases need to create a label to jump to
10666     in order to properly fall through.
10667     These cases set DROP_THROUGH_LABEL nonzero.  */
10668  rtx drop_through_label = 0;
10669  rtx temp;
10670  rtx comparison = 0;
10671  int i;
10672  tree type;
10673  enum machine_mode mode;
10674
10675#ifdef MAX_INTEGER_COMPUTATION_MODE
10676  check_max_integer_computation_mode (exp);
10677#endif
10678
10679  emit_queue ();
10680
10681  switch (code)
10682    {
10683    case ERROR_MARK:
10684      break;
10685
10686    case INTEGER_CST:
10687      temp = integer_zerop (exp) ? if_false_label : if_true_label;
10688      if (temp)
10689	emit_jump (temp);
10690      break;
10691
10692#if 0
10693      /* This is not true with #pragma weak  */
10694    case ADDR_EXPR:
10695      /* The address of something can never be zero.  */
10696      if (if_true_label)
10697	emit_jump (if_true_label);
10698      break;
10699#endif
10700
10701    case NOP_EXPR:
10702      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10703	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10704	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10705	goto normal;
10706    case CONVERT_EXPR:
10707      /* If we are narrowing the operand, we have to do the compare in the
10708	 narrower mode.  */
10709      if ((TYPE_PRECISION (TREE_TYPE (exp))
10710	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10711	goto normal;
10712    case NON_LVALUE_EXPR:
10713    case REFERENCE_EXPR:
10714    case ABS_EXPR:
10715    case NEGATE_EXPR:
10716    case LROTATE_EXPR:
10717    case RROTATE_EXPR:
10718      /* These cannot change zero->non-zero or vice versa.  */
10719      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10720      break;
10721
10722#if 0
10723      /* This is never less insns than evaluating the PLUS_EXPR followed by
10724	 a test and can be longer if the test is eliminated.  */
10725    case PLUS_EXPR:
10726      /* Reduce to minus.  */
10727      exp = build (MINUS_EXPR, TREE_TYPE (exp),
10728		   TREE_OPERAND (exp, 0),
10729		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10730				 TREE_OPERAND (exp, 1))));
10731      /* Process as MINUS.  */
10732#endif
10733
10734    case MINUS_EXPR:
10735      /* Non-zero iff operands of minus differ.  */
10736      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10737				   TREE_OPERAND (exp, 0),
10738				   TREE_OPERAND (exp, 1)),
10739			    NE, NE);
10740      break;
10741
10742    case BIT_AND_EXPR:
10743      /* If we are AND'ing with a small constant, do this comparison in the
10744	 smallest type that fits.  If the machine doesn't have comparisons
10745	 that small, it will be converted back to the wider comparison.
10746	 This helps if we are testing the sign bit of a narrower object.
10747	 combine can't do this for us because it can't know whether a
10748	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
10749
10750      if (! SLOW_BYTE_ACCESS
10751	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10752	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10753	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10754	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10755	  && (type = type_for_mode (mode, 1)) != 0
10756	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10757	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10758	      != CODE_FOR_nothing))
10759	{
10760	  do_jump (convert (type, exp), if_false_label, if_true_label);
10761	  break;
10762	}
10763      goto normal;
10764
10765    case TRUTH_NOT_EXPR:
10766      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10767      break;
10768
10769    case TRUTH_ANDIF_EXPR:
10770      if (if_false_label == 0)
10771	if_false_label = drop_through_label = gen_label_rtx ();
10772      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10773      start_cleanup_deferral ();
10774      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10775      end_cleanup_deferral ();
10776      break;
10777
10778    case TRUTH_ORIF_EXPR:
10779      if (if_true_label == 0)
10780	if_true_label = drop_through_label = gen_label_rtx ();
10781      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10782      start_cleanup_deferral ();
10783      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10784      end_cleanup_deferral ();
10785      break;
10786
10787    case COMPOUND_EXPR:
10788      push_temp_slots ();
10789      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10790      preserve_temp_slots (NULL_RTX);
10791      free_temp_slots ();
10792      pop_temp_slots ();
10793      emit_queue ();
10794      do_pending_stack_adjust ();
10795      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10796      break;
10797
10798    case COMPONENT_REF:
10799    case BIT_FIELD_REF:
10800    case ARRAY_REF:
10801      {
10802	int bitsize, bitpos, unsignedp;
10803	enum machine_mode mode;
10804	tree type;
10805	tree offset;
10806	int volatilep = 0;
10807	int alignment;
10808
10809	/* Get description of this reference.  We don't actually care
10810	   about the underlying object here.  */
10811	get_inner_reference (exp, &bitsize, &bitpos, &offset,
10812			     &mode, &unsignedp, &volatilep,
10813			     &alignment);
10814
10815	type = type_for_size (bitsize, unsignedp);
10816	if (! SLOW_BYTE_ACCESS
10817	    && type != 0 && bitsize >= 0
10818	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10819	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10820		!= CODE_FOR_nothing))
10821	  {
10822	    do_jump (convert (type, exp), if_false_label, if_true_label);
10823	    break;
10824	  }
10825	goto normal;
10826      }
10827
10828    case COND_EXPR:
10829      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
10830      if (integer_onep (TREE_OPERAND (exp, 1))
10831	  && integer_zerop (TREE_OPERAND (exp, 2)))
10832	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10833
10834      else if (integer_zerop (TREE_OPERAND (exp, 1))
10835	       && integer_onep (TREE_OPERAND (exp, 2)))
10836	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10837
10838      else
10839	{
10840	  register rtx label1 = gen_label_rtx ();
10841	  drop_through_label = gen_label_rtx ();
10842
10843	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10844
10845	  start_cleanup_deferral ();
10846	  /* Now the THEN-expression.  */
10847	  do_jump (TREE_OPERAND (exp, 1),
10848		   if_false_label ? if_false_label : drop_through_label,
10849		   if_true_label ? if_true_label : drop_through_label);
10850	  /* In case the do_jump just above never jumps.  */
10851	  do_pending_stack_adjust ();
10852	  emit_label (label1);
10853
10854	  /* Now the ELSE-expression.  */
10855	  do_jump (TREE_OPERAND (exp, 2),
10856		   if_false_label ? if_false_label : drop_through_label,
10857		   if_true_label ? if_true_label : drop_through_label);
10858	  end_cleanup_deferral ();
10859	}
10860      break;
10861
10862    case EQ_EXPR:
10863      {
10864	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10865
10866	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10867	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10868	  {
10869	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10870	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10871	    do_jump
10872	      (fold
10873	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10874		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10875				    fold (build1 (REALPART_EXPR,
10876						  TREE_TYPE (inner_type),
10877						  exp0)),
10878				    fold (build1 (REALPART_EXPR,
10879						  TREE_TYPE (inner_type),
10880						  exp1)))),
10881		       fold (build (EQ_EXPR, TREE_TYPE (exp),
10882				    fold (build1 (IMAGPART_EXPR,
10883						  TREE_TYPE (inner_type),
10884						  exp0)),
10885				    fold (build1 (IMAGPART_EXPR,
10886						  TREE_TYPE (inner_type),
10887						  exp1)))))),
10888	       if_false_label, if_true_label);
10889	  }
10890
10891	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10892	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10893
10894	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10895		 && !can_compare_p (TYPE_MODE (inner_type)))
10896	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10897	else
10898	  comparison = compare (exp, EQ, EQ);
10899	break;
10900      }
10901
10902    case NE_EXPR:
10903      {
10904	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10905
10906	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10907	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10908	  {
10909	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10910	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10911	    do_jump
10912	      (fold
10913	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10914		       fold (build (NE_EXPR, TREE_TYPE (exp),
10915				    fold (build1 (REALPART_EXPR,
10916						  TREE_TYPE (inner_type),
10917						  exp0)),
10918				    fold (build1 (REALPART_EXPR,
10919						  TREE_TYPE (inner_type),
10920						  exp1)))),
10921		       fold (build (NE_EXPR, TREE_TYPE (exp),
10922				    fold (build1 (IMAGPART_EXPR,
10923						  TREE_TYPE (inner_type),
10924						  exp0)),
10925				    fold (build1 (IMAGPART_EXPR,
10926						  TREE_TYPE (inner_type),
10927						  exp1)))))),
10928	       if_false_label, if_true_label);
10929	  }
10930
10931	else if (integer_zerop (TREE_OPERAND (exp, 1)))
10932	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10933
10934	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10935		 && !can_compare_p (TYPE_MODE (inner_type)))
10936	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10937	else
10938	  comparison = compare (exp, NE, NE);
10939	break;
10940      }
10941
10942    case LT_EXPR:
10943      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10944	   == MODE_INT)
10945	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10946	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10947      else
10948	comparison = compare (exp, LT, LTU);
10949      break;
10950
10951    case LE_EXPR:
10952      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10953	   == MODE_INT)
10954	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10955	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10956      else
10957	comparison = compare (exp, LE, LEU);
10958      break;
10959
10960    case GT_EXPR:
10961      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10962	   == MODE_INT)
10963	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10964	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10965      else
10966	comparison = compare (exp, GT, GTU);
10967      break;
10968
10969    case GE_EXPR:
10970      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10971	   == MODE_INT)
10972	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10973	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10974      else
10975	comparison = compare (exp, GE, GEU);
10976      break;
10977
10978    default:
10979    normal:
10980      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10981#if 0
10982      /* This is not needed any more and causes poor code since it causes
10983	 comparisons and tests from non-SI objects to have different code
10984	 sequences.  */
10985      /* Copy to register to avoid generating bad insns by cse
10986	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
10987      if (!cse_not_expected && GET_CODE (temp) == MEM)
10988	temp = copy_to_reg (temp);
10989#endif
10990      do_pending_stack_adjust ();
10991      if (GET_CODE (temp) == CONST_INT)
10992	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10993      else if (GET_CODE (temp) == LABEL_REF)
10994	comparison = const_true_rtx;
10995      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10996	       && !can_compare_p (GET_MODE (temp)))
10997	/* Note swapping the labels gives us not-equal.  */
10998	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10999      else if (GET_MODE (temp) != VOIDmode)
11000	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
11001				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
11002				       GET_MODE (temp), NULL_RTX, 0);
11003      else
11004	abort ();
11005    }
11006
11007  /* Do any postincrements in the expression that was tested.  */
11008  emit_queue ();
11009
11010  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
11011     straight into a conditional jump instruction as the jump condition.
11012     Otherwise, all the work has been done already.  */
11013
11014  if (comparison == const_true_rtx)
11015    {
11016      if (if_true_label)
11017	emit_jump (if_true_label);
11018    }
11019  else if (comparison == const0_rtx)
11020    {
11021      if (if_false_label)
11022	emit_jump (if_false_label);
11023    }
11024  else if (comparison)
11025    do_jump_for_compare (comparison, if_false_label, if_true_label);
11026
11027  if (drop_through_label)
11028    {
11029      /* If do_jump produces code that might be jumped around,
11030	 do any stack adjusts from that code, before the place
11031	 where control merges in.  */
11032      do_pending_stack_adjust ();
11033      emit_label (drop_through_label);
11034    }
11035}
11036
11037/* Given a comparison expression EXP for values too wide to be compared
11038   with one insn, test the comparison and jump to the appropriate label.
11039   The code of EXP is ignored; we always test GT if SWAP is 0,
11040   and LT if SWAP is 1.  */
11041
11042static void
11043do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
11044     tree exp;
11045     int swap;
11046     rtx if_false_label, if_true_label;
11047{
11048  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
11049  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
11050  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11051  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11052  rtx drop_through_label = 0;
11053  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
11054  int i;
11055
11056  if (! if_true_label || ! if_false_label)
11057    drop_through_label = gen_label_rtx ();
11058  if (! if_true_label)
11059    if_true_label = drop_through_label;
11060  if (! if_false_label)
11061    if_false_label = drop_through_label;
11062
11063  /* Compare a word at a time, high order first.  */
11064  for (i = 0; i < nwords; i++)
11065    {
11066      rtx comp;
11067      rtx op0_word, op1_word;
11068
11069      if (WORDS_BIG_ENDIAN)
11070	{
11071	  op0_word = operand_subword_force (op0, i, mode);
11072	  op1_word = operand_subword_force (op1, i, mode);
11073	}
11074      else
11075	{
11076	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11077	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11078	}
11079
11080      /* All but high-order word must be compared as unsigned.  */
11081      comp = compare_from_rtx (op0_word, op1_word,
11082			       (unsignedp || i > 0) ? GTU : GT,
11083			       unsignedp, word_mode, NULL_RTX, 0);
11084      if (comp == const_true_rtx)
11085	emit_jump (if_true_label);
11086      else if (comp != const0_rtx)
11087	do_jump_for_compare (comp, NULL_RTX, if_true_label);
11088
11089      /* Consider lower words only if these are equal.  */
11090      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11091			       NULL_RTX, 0);
11092      if (comp == const_true_rtx)
11093	emit_jump (if_false_label);
11094      else if (comp != const0_rtx)
11095	do_jump_for_compare (comp, NULL_RTX, if_false_label);
11096    }
11097
11098  if (if_false_label)
11099    emit_jump (if_false_label);
11100  if (drop_through_label)
11101    emit_label (drop_through_label);
11102}
11103
11104/* Compare OP0 with OP1, word at a time, in mode MODE.
11105   UNSIGNEDP says to do unsigned comparison.
11106   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
11107
11108void
11109do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
11110     enum machine_mode mode;
11111     int unsignedp;
11112     rtx op0, op1;
11113     rtx if_false_label, if_true_label;
11114{
11115  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11116  rtx drop_through_label = 0;
11117  int i;
11118
11119  if (! if_true_label || ! if_false_label)
11120    drop_through_label = gen_label_rtx ();
11121  if (! if_true_label)
11122    if_true_label = drop_through_label;
11123  if (! if_false_label)
11124    if_false_label = drop_through_label;
11125
11126  /* Compare a word at a time, high order first.  */
11127  for (i = 0; i < nwords; i++)
11128    {
11129      rtx comp;
11130      rtx op0_word, op1_word;
11131
11132      if (WORDS_BIG_ENDIAN)
11133	{
11134	  op0_word = operand_subword_force (op0, i, mode);
11135	  op1_word = operand_subword_force (op1, i, mode);
11136	}
11137      else
11138	{
11139	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11140	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11141	}
11142
11143      /* All but high-order word must be compared as unsigned.  */
11144      comp = compare_from_rtx (op0_word, op1_word,
11145			       (unsignedp || i > 0) ? GTU : GT,
11146			       unsignedp, word_mode, NULL_RTX, 0);
11147      if (comp == const_true_rtx)
11148	emit_jump (if_true_label);
11149      else if (comp != const0_rtx)
11150	do_jump_for_compare (comp, NULL_RTX, if_true_label);
11151
11152      /* Consider lower words only if these are equal.  */
11153      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11154			       NULL_RTX, 0);
11155      if (comp == const_true_rtx)
11156	emit_jump (if_false_label);
11157      else if (comp != const0_rtx)
11158	do_jump_for_compare (comp, NULL_RTX, if_false_label);
11159    }
11160
11161  if (if_false_label)
11162    emit_jump (if_false_label);
11163  if (drop_through_label)
11164    emit_label (drop_through_label);
11165}
11166
11167/* Given an EQ_EXPR expression EXP for values too wide to be compared
11168   with one insn, test the comparison and jump to the appropriate label.  */
11169
11170static void
11171do_jump_by_parts_equality (exp, if_false_label, if_true_label)
11172     tree exp;
11173     rtx if_false_label, if_true_label;
11174{
11175  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11176  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11177  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11178  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11179  int i;
11180  rtx drop_through_label = 0;
11181
11182  if (! if_false_label)
11183    drop_through_label = if_false_label = gen_label_rtx ();
11184
11185  for (i = 0; i < nwords; i++)
11186    {
11187      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
11188				   operand_subword_force (op1, i, mode),
11189				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
11190				   word_mode, NULL_RTX, 0);
11191      if (comp == const_true_rtx)
11192	emit_jump (if_false_label);
11193      else if (comp != const0_rtx)
11194	do_jump_for_compare (comp, if_false_label, NULL_RTX);
11195    }
11196
11197  if (if_true_label)
11198    emit_jump (if_true_label);
11199  if (drop_through_label)
11200    emit_label (drop_through_label);
11201}
11202
11203/* Jump according to whether OP0 is 0.
11204   We assume that OP0 has an integer mode that is too wide
11205   for the available compare insns.  */
11206
11207void
11208do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
11209     rtx op0;
11210     rtx if_false_label, if_true_label;
11211{
11212  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11213  rtx part;
11214  int i;
11215  rtx drop_through_label = 0;
11216
11217  /* The fastest way of doing this comparison on almost any machine is to
11218     "or" all the words and compare the result.  If all have to be loaded
11219     from memory and this is a very wide item, it's possible this may
11220     be slower, but that's highly unlikely.  */
11221
11222  part = gen_reg_rtx (word_mode);
11223  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11224  for (i = 1; i < nwords && part != 0; i++)
11225    part = expand_binop (word_mode, ior_optab, part,
11226			 operand_subword_force (op0, i, GET_MODE (op0)),
11227			 part, 1, OPTAB_WIDEN);
11228
11229  if (part != 0)
11230    {
11231      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11232				   NULL_RTX, 0);
11233
11234      if (comp == const_true_rtx)
11235	emit_jump (if_false_label);
11236      else if (comp == const0_rtx)
11237	emit_jump (if_true_label);
11238      else
11239	do_jump_for_compare (comp, if_false_label, if_true_label);
11240
11241      return;
11242    }
11243
11244  /* If we couldn't do the "or" simply, do this with a series of compares.  */
11245  if (! if_false_label)
11246    drop_through_label = if_false_label = gen_label_rtx ();
11247
11248  for (i = 0; i < nwords; i++)
11249    {
11250      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11251							  GET_MODE (op0)),
11252				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11253      if (comp == const_true_rtx)
11254	emit_jump (if_false_label);
11255      else if (comp != const0_rtx)
11256	do_jump_for_compare (comp, if_false_label, NULL_RTX);
11257    }
11258
11259  if (if_true_label)
11260    emit_jump (if_true_label);
11261
11262  if (drop_through_label)
11263    emit_label (drop_through_label);
11264}
11265
11266/* Given a comparison expression in rtl form, output conditional branches to
11267   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
11268
11269static void
11270do_jump_for_compare (comparison, if_false_label, if_true_label)
11271     rtx comparison, if_false_label, if_true_label;
11272{
11273  if (if_true_label)
11274    {
11275      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11276	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11277			  (if_true_label));
11278      else
11279	abort ();
11280
11281      if (if_false_label)
11282	emit_jump (if_false_label);
11283    }
11284  else if (if_false_label)
11285    {
11286      rtx first = get_last_insn (), insn, branch;
11287      int br_count;
11288
11289      /* Output the branch with the opposite condition.  Then try to invert
11290	 what is generated.  If more than one insn is a branch, or if the
11291	 branch is not the last insn written, abort. If we can't invert
11292	 the branch, emit make a true label, redirect this jump to that,
11293	 emit a jump to the false label and define the true label.  */
11294      /* ??? Note that we wouldn't have to do any of this nonsense if
11295	 we passed both labels into a combined compare-and-branch.
11296	 Ah well, jump threading does a good job of repairing the damage.  */
11297
11298      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11299	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11300			  (if_false_label));
11301      else
11302	abort ();
11303
11304      /* Here we get the first insn that was just emitted.  It used to be the
11305	 case that, on some machines, emitting the branch would discard
11306	 the previous compare insn and emit a replacement.  This isn't
11307	 done anymore, but abort if we see that FIRST is deleted.  */
11308
11309      if (first == 0)
11310	first = get_insns ();
11311      else if (INSN_DELETED_P (first))
11312	abort ();
11313      else
11314	first = NEXT_INSN (first);
11315
11316      /* Look for multiple branches in this sequence, as might be generated
11317	 for a multi-word integer comparison.  */
11318
11319      br_count = 0;
11320      branch = NULL_RTX;
11321      for (insn = first; insn ; insn = NEXT_INSN (insn))
11322	if (GET_CODE (insn) == JUMP_INSN)
11323	  {
11324	    branch = insn;
11325	    br_count += 1;
11326	  }
11327
11328      /* If we've got one branch at the end of the sequence,
11329	 we can try to reverse it.  */
11330
11331      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
11332	{
11333	  rtx insn_label;
11334	  insn_label = XEXP (condjump_label (branch), 0);
11335	  JUMP_LABEL (branch) = insn_label;
11336
11337	  if (insn_label != if_false_label)
11338	    abort ();
11339
11340	  if (invert_jump (branch, if_false_label))
11341	    return;
11342	}
11343
11344      /* Multiple branches, or reversion failed.  Convert to branches
11345	 around an unconditional jump.  */
11346
11347      if_true_label = gen_label_rtx ();
11348      for (insn = first; insn; insn = NEXT_INSN (insn))
11349	if (GET_CODE (insn) == JUMP_INSN)
11350	  {
11351	    rtx insn_label;
11352	    insn_label = XEXP (condjump_label (insn), 0);
11353	    JUMP_LABEL (insn) = insn_label;
11354
11355	    if (insn_label == if_false_label)
11356	      redirect_jump (insn, if_true_label);
11357	  }
11358	emit_jump (if_false_label);
11359	emit_label (if_true_label);
11360    }
11361}
11362
11363/* Generate code for a comparison expression EXP
11364   (including code to compute the values to be compared)
11365   and set (CC0) according to the result.
11366   SIGNED_CODE should be the rtx operation for this comparison for
11367   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11368
11369   We force a stack adjustment unless there are currently
11370   things pushed on the stack that aren't yet used.  */
11371
11372static rtx
11373compare (exp, signed_code, unsigned_code)
11374     register tree exp;
11375     enum rtx_code signed_code, unsigned_code;
11376{
11377  register rtx op0, op1;
11378  register tree type;
11379  register enum machine_mode mode;
11380  int unsignedp;
11381  enum rtx_code code;
11382
11383  /* Don't crash if the comparison was erroneous.  */
11384  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11385  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
11386    return op0;
11387
11388  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11389  type = TREE_TYPE (TREE_OPERAND (exp, 0));
11390  mode = TYPE_MODE (type);
11391  unsignedp = TREE_UNSIGNED (type);
11392  code = unsignedp ? unsigned_code : signed_code;
11393
11394#ifdef HAVE_canonicalize_funcptr_for_compare
11395  /* If function pointers need to be "canonicalized" before they can
11396     be reliably compared, then canonicalize them.  */
11397  if (HAVE_canonicalize_funcptr_for_compare
11398      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11399      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11400	  == FUNCTION_TYPE))
11401    {
11402      rtx new_op0 = gen_reg_rtx (mode);
11403
11404      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11405      op0 = new_op0;
11406    }
11407
11408  if (HAVE_canonicalize_funcptr_for_compare
11409      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11410      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11411	  == FUNCTION_TYPE))
11412    {
11413      rtx new_op1 = gen_reg_rtx (mode);
11414
11415      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11416      op1 = new_op1;
11417    }
11418#endif
11419
11420  return compare_from_rtx (op0, op1, code, unsignedp, mode,
11421			   ((mode == BLKmode)
11422			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11423			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11424}
11425
11426/* Like compare but expects the values to compare as two rtx's.
11427   The decision as to signed or unsigned comparison must be made by the caller.
11428
11429   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11430   compared.
11431
11432   If ALIGN is non-zero, it is the alignment of this type; if zero, the
11433   size of MODE should be used.  */
11434
11435rtx
11436compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11437     register rtx op0, op1;
11438     enum rtx_code code;
11439     int unsignedp;
11440     enum machine_mode mode;
11441     rtx size;
11442     int align;
11443{
11444  rtx tem;
11445
11446  /* If one operand is constant, make it the second one.  Only do this
11447     if the other operand is not constant as well.  */
11448
11449  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11450      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11451    {
11452      tem = op0;
11453      op0 = op1;
11454      op1 = tem;
11455      code = swap_condition (code);
11456    }
11457
11458  if (flag_force_mem)
11459    {
11460      op0 = force_not_mem (op0);
11461      op1 = force_not_mem (op1);
11462    }
11463
11464  do_pending_stack_adjust ();
11465
11466  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11467      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11468    return tem;
11469
11470#if 0
11471  /* There's no need to do this now that combine.c can eliminate lots of
11472     sign extensions.  This can be less efficient in certain cases on other
11473     machines.  */
11474
11475  /* If this is a signed equality comparison, we can do it as an
11476     unsigned comparison since zero-extension is cheaper than sign
11477     extension and comparisons with zero are done as unsigned.  This is
11478     the case even on machines that can do fast sign extension, since
11479     zero-extension is easier to combine with other operations than
11480     sign-extension is.  If we are comparing against a constant, we must
11481     convert it to what it would look like unsigned.  */
11482  if ((code == EQ || code == NE) && ! unsignedp
11483      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11484    {
11485      if (GET_CODE (op1) == CONST_INT
11486	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11487	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11488      unsignedp = 1;
11489    }
11490#endif
11491
11492  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11493
11494  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11495}
11496
11497/* Generate code to calculate EXP using a store-flag instruction
11498   and return an rtx for the result.  EXP is either a comparison
11499   or a TRUTH_NOT_EXPR whose operand is a comparison.
11500
11501   If TARGET is nonzero, store the result there if convenient.
11502
11503   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11504   cheap.
11505
11506   Return zero if there is no suitable set-flag instruction
11507   available on this machine.
11508
11509   Once expand_expr has been called on the arguments of the comparison,
11510   we are committed to doing the store flag, since it is not safe to
11511   re-evaluate the expression.  We emit the store-flag insn by calling
11512   emit_store_flag, but only expand the arguments if we have a reason
11513   to believe that emit_store_flag will be successful.  If we think that
11514   it will, but it isn't, we have to simulate the store-flag with a
11515   set/jump/set sequence.  */
11516
11517static rtx
11518do_store_flag (exp, target, mode, only_cheap)
11519     tree exp;
11520     rtx target;
11521     enum machine_mode mode;
11522     int only_cheap;
11523{
11524  enum rtx_code code;
11525  tree arg0, arg1, type;
11526  tree tem;
11527  enum machine_mode operand_mode;
11528  int invert = 0;
11529  int unsignedp;
11530  rtx op0, op1;
11531  enum insn_code icode;
11532  rtx subtarget = target;
11533  rtx result, label;
11534
11535  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11536     result at the end.  We can't simply invert the test since it would
11537     have already been inverted if it were valid.  This case occurs for
11538     some floating-point comparisons.  */
11539
11540  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11541    invert = 1, exp = TREE_OPERAND (exp, 0);
11542
11543  arg0 = TREE_OPERAND (exp, 0);
11544  arg1 = TREE_OPERAND (exp, 1);
11545  type = TREE_TYPE (arg0);
11546  operand_mode = TYPE_MODE (type);
11547  unsignedp = TREE_UNSIGNED (type);
11548
11549  /* We won't bother with BLKmode store-flag operations because it would mean
11550     passing a lot of information to emit_store_flag.  */
11551  if (operand_mode == BLKmode)
11552    return 0;
11553
11554  /* We won't bother with store-flag operations involving function pointers
11555     when function pointers must be canonicalized before comparisons.  */
11556#ifdef HAVE_canonicalize_funcptr_for_compare
11557  if (HAVE_canonicalize_funcptr_for_compare
11558      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11559	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11560	       == FUNCTION_TYPE))
11561	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11562	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11563		  == FUNCTION_TYPE))))
11564    return 0;
11565#endif
11566
11567  STRIP_NOPS (arg0);
11568  STRIP_NOPS (arg1);
11569
11570  /* Get the rtx comparison code to use.  We know that EXP is a comparison
11571     operation of some type.  Some comparisons against 1 and -1 can be
11572     converted to comparisons with zero.  Do so here so that the tests
11573     below will be aware that we have a comparison with zero.   These
11574     tests will not catch constants in the first operand, but constants
11575     are rarely passed as the first operand.  */
11576
11577  switch (TREE_CODE (exp))
11578    {
11579    case EQ_EXPR:
11580      code = EQ;
11581      break;
11582    case NE_EXPR:
11583      code = NE;
11584      break;
11585    case LT_EXPR:
11586      if (integer_onep (arg1))
11587	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11588      else
11589	code = unsignedp ? LTU : LT;
11590      break;
11591    case LE_EXPR:
11592      if (! unsignedp && integer_all_onesp (arg1))
11593	arg1 = integer_zero_node, code = LT;
11594      else
11595	code = unsignedp ? LEU : LE;
11596      break;
11597    case GT_EXPR:
11598      if (! unsignedp && integer_all_onesp (arg1))
11599	arg1 = integer_zero_node, code = GE;
11600      else
11601	code = unsignedp ? GTU : GT;
11602      break;
11603    case GE_EXPR:
11604      if (integer_onep (arg1))
11605	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11606      else
11607	code = unsignedp ? GEU : GE;
11608      break;
11609    default:
11610      abort ();
11611    }
11612
11613  /* Put a constant second.  */
11614  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11615    {
11616      tem = arg0; arg0 = arg1; arg1 = tem;
11617      code = swap_condition (code);
11618    }
11619
11620  /* If this is an equality or inequality test of a single bit, we can
11621     do this by shifting the bit being tested to the low-order bit and
11622     masking the result with the constant 1.  If the condition was EQ,
11623     we xor it with 1.  This does not require an scc insn and is faster
11624     than an scc insn even if we have it.  */
11625
11626  if ((code == NE || code == EQ)
11627      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11628      && integer_pow2p (TREE_OPERAND (arg0, 1)))
11629    {
11630      tree inner = TREE_OPERAND (arg0, 0);
11631      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11632      int ops_unsignedp;
11633
11634      /* If INNER is a right shift of a constant and it plus BITNUM does
11635	 not overflow, adjust BITNUM and INNER.  */
11636
11637      if (TREE_CODE (inner) == RSHIFT_EXPR
11638	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11639	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11640	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11641	      < TYPE_PRECISION (type)))
11642	{
11643	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11644	  inner = TREE_OPERAND (inner, 0);
11645	}
11646
11647      /* If we are going to be able to omit the AND below, we must do our
11648	 operations as unsigned.  If we must use the AND, we have a choice.
11649	 Normally unsigned is faster, but for some machines signed is.  */
11650      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11651#ifdef LOAD_EXTEND_OP
11652		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11653#else
11654		       : 1
11655#endif
11656		       );
11657
11658      if (subtarget == 0 || GET_CODE (subtarget) != REG
11659	  || GET_MODE (subtarget) != operand_mode
11660	  || ! safe_from_p (subtarget, inner, 1))
11661	subtarget = 0;
11662
11663      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11664
11665      if (bitnum != 0)
11666	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11667			    size_int (bitnum), subtarget, ops_unsignedp);
11668
11669      if (GET_MODE (op0) != mode)
11670	op0 = convert_to_mode (mode, op0, ops_unsignedp);
11671
11672      if ((code == EQ && ! invert) || (code == NE && invert))
11673	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11674			    ops_unsignedp, OPTAB_LIB_WIDEN);
11675
11676      /* Put the AND last so it can combine with more things.  */
11677      if (bitnum != TYPE_PRECISION (type) - 1)
11678	op0 = expand_and (op0, const1_rtx, subtarget);
11679
11680      return op0;
11681    }
11682
11683  /* Now see if we are likely to be able to do this.  Return if not.  */
11684  if (! can_compare_p (operand_mode))
11685    return 0;
11686  icode = setcc_gen_code[(int) code];
11687  if (icode == CODE_FOR_nothing
11688      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11689    {
11690      /* We can only do this if it is one of the special cases that
11691	 can be handled without an scc insn.  */
11692      if ((code == LT && integer_zerop (arg1))
11693	  || (! only_cheap && code == GE && integer_zerop (arg1)))
11694	;
11695      else if (BRANCH_COST >= 0
11696	       && ! only_cheap && (code == NE || code == EQ)
11697	       && TREE_CODE (type) != REAL_TYPE
11698	       && ((abs_optab->handlers[(int) operand_mode].insn_code
11699		    != CODE_FOR_nothing)
11700		   || (ffs_optab->handlers[(int) operand_mode].insn_code
11701		       != CODE_FOR_nothing)))
11702	;
11703      else
11704	return 0;
11705    }
11706
11707  preexpand_calls (exp);
11708  if (subtarget == 0 || GET_CODE (subtarget) != REG
11709      || GET_MODE (subtarget) != operand_mode
11710      || ! safe_from_p (subtarget, arg1, 1))
11711    subtarget = 0;
11712
11713  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11714  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11715
11716  if (target == 0)
11717    target = gen_reg_rtx (mode);
11718
11719  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
11720     because, if the emit_store_flag does anything it will succeed and
11721     OP0 and OP1 will not be used subsequently.  */
11722
11723  result = emit_store_flag (target, code,
11724			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11725			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11726			    operand_mode, unsignedp, 1);
11727
11728  if (result)
11729    {
11730      if (invert)
11731	result = expand_binop (mode, xor_optab, result, const1_rtx,
11732			       result, 0, OPTAB_LIB_WIDEN);
11733      return result;
11734    }
11735
11736  /* If this failed, we have to do this with set/compare/jump/set code.  */
11737  if (GET_CODE (target) != REG
11738      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11739    target = gen_reg_rtx (GET_MODE (target));
11740
11741  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11742  result = compare_from_rtx (op0, op1, code, unsignedp,
11743			     operand_mode, NULL_RTX, 0);
11744  if (GET_CODE (result) == CONST_INT)
11745    return (((result == const0_rtx && ! invert)
11746	     || (result != const0_rtx && invert))
11747	    ? const0_rtx : const1_rtx);
11748
11749  label = gen_label_rtx ();
11750  if (bcc_gen_fctn[(int) code] == 0)
11751    abort ();
11752
11753  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11754  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11755  emit_label (label);
11756
11757  return target;
11758}
11759
11760/* Generate a tablejump instruction (used for switch statements).  */
11761
11762#ifdef HAVE_tablejump
11763
11764/* INDEX is the value being switched on, with the lowest value
11765   in the table already subtracted.
11766   MODE is its expected mode (needed if INDEX is constant).
11767   RANGE is the length of the jump table.
11768   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11769
11770   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11771   index value is out of range.  */
11772
11773void
11774do_tablejump (index, mode, range, table_label, default_label)
11775     rtx index, range, table_label, default_label;
11776     enum machine_mode mode;
11777{
11778  register rtx temp, vector;
11779
11780  /* Do an unsigned comparison (in the proper mode) between the index
11781     expression and the value which represents the length of the range.
11782     Since we just finished subtracting the lower bound of the range
11783     from the index expression, this comparison allows us to simultaneously
11784     check that the original index expression value is both greater than
11785     or equal to the minimum value of the range and less than or equal to
11786     the maximum value of the range.  */
11787
11788  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11789			   0, default_label);
11790
11791  /* If index is in range, it must fit in Pmode.
11792     Convert to Pmode so we can index with it.  */
11793  if (mode != Pmode)
11794    index = convert_to_mode (Pmode, index, 1);
11795
11796  /* Don't let a MEM slip thru, because then INDEX that comes
11797     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11798     and break_out_memory_refs will go to work on it and mess it up.  */
11799#ifdef PIC_CASE_VECTOR_ADDRESS
11800  if (flag_pic && GET_CODE (index) != REG)
11801    index = copy_to_mode_reg (Pmode, index);
11802#endif
11803
11804  /* If flag_force_addr were to affect this address
11805     it could interfere with the tricky assumptions made
11806     about addresses that contain label-refs,
11807     which may be valid only very near the tablejump itself.  */
11808  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11809     GET_MODE_SIZE, because this indicates how large insns are.  The other
11810     uses should all be Pmode, because they are addresses.  This code
11811     could fail if addresses and insns are not the same size.  */
11812  index = gen_rtx_PLUS (Pmode,
11813			gen_rtx_MULT (Pmode, index,
11814				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11815			gen_rtx_LABEL_REF (Pmode, table_label));
11816#ifdef PIC_CASE_VECTOR_ADDRESS
11817  if (flag_pic)
11818    index = PIC_CASE_VECTOR_ADDRESS (index);
11819  else
11820#endif
11821    index = memory_address_noforce (CASE_VECTOR_MODE, index);
11822  temp = gen_reg_rtx (CASE_VECTOR_MODE);
11823  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11824  RTX_UNCHANGING_P (vector) = 1;
11825  convert_move (temp, vector, 0);
11826
11827  emit_jump_insn (gen_tablejump (temp, table_label));
11828
11829  /* If we are generating PIC code or if the table is PC-relative, the
11830     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11831  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11832    emit_barrier ();
11833}
11834
11835#endif /* HAVE_tablejump */
11836