expr.c revision 18334
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
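
/* For illustration: CEIL rounds an integer division upward, so
   CEIL (9, 4) == 3 while 9 / 4 == 2.  */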

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;			/* Destination block (a BLKmode MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR autoincrements.  */
  int explicit_inc_to;		/* +1/-1 if we emit explicit add insns.  */
  int to_struct;		/* MEM_IN_STRUCT_P flag for TO.  */
  rtx from;			/* Source block (a BLKmode MEM).  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR autoincrements.  */
  int explicit_inc_from;	/* +1/-1 if we emit explicit add insns.  */
  int from_struct;		/* MEM_IN_STRUCT_P flag for FROM.  */
  int len;			/* Number of bytes remaining to move.  */
  int offset;			/* Current byte offset within the blocks.  */
  int reverse;			/* Nonzero means move from high to low addresses.  */
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack. (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.) */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;


static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void store_constructor	PROTO((tree, rtx));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int));
rtx bc_expand_increment		PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code 	PROTO((tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory 		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address 		PROTO((tree));
void bc_expand_constructor 	PROTO((tree));
void bc_adjust_stack 		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern void (*interim_eh_hook)	PROTO((tree));
extern tree truthvalue_conversion       PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
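
/* For illustration: with the default MOVE_RATIO of 15 above, a
   fixed-size copy that move_by_pieces_ninsns estimates at 14 or fewer
   move insns (e.g. a 56-byte block on a machine with 4-byte words and
   full alignment) is expanded inline by move_by_pieces; larger copies
   fall back to a movstr pattern or a library call in emit_block_move.  */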

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes. */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes. */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
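
/* For illustration, a hypothetical modemap.def entry (the real entries
   live in that file) of the form

     DEF_MODEMAP (SImode, ..., ..., constSI, loadSI, storeSI)

   would expand through the macro above into

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;  */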

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
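
/* As used in this file, the five operands of the QUEUED rtx built above
   are: QUEUED_VAR, the variable to be incremented; QUEUED_INSN, the
   increment insn once it has been emitted (filled in by emit_queue);
   QUEUED_COPY, a register holding the pre-increment value (filled in
   lazily by protect_from_queue); QUEUED_BODY, the insn body passed in
   here; and QUEUED_NEXT, the rest of the pending chain.  */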

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
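
/* A sketch of the intended calling pattern (hypothetical caller code,
   not taken from this file): to expand something like `a[i++] = b',
   an expander might do

     rtx q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));
     ... build insns using protect_from_queue (q, 0) wherever the
	 pre-increment value of `i' is needed ...
     emit_queue ();

   so the increment of `i' is emitted exactly once, after all uses of
   the old value have been protected.  */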

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
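
/* A worked example of the CONST_INT handling above (assuming
   HOST_BITS_PER_WIDE_INT is wider than 8): for
   convert_modes (HImode, QImode, GEN_INT (-1), 1), VAL starts as -1
   and WIDTH is 8, so VAL is masked down to 255; because UNSIGNEDP is
   set, no sign extension is applied, and the result is
   (const_int 255) rather than (const_int -1).  */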

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
    through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
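
/* A worked example: assuming MOVE_MAX is 4 and SImode, HImode and
   QImode all have usable mov patterns at the given alignment,
   move_by_pieces_ninsns (7, 4) counts one SImode move (4 bytes), one
   HImode move (2 bytes) and one QImode move (1 byte), returning 3.  */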

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1642		 here because if SIZE is less than the mode mask, as it is
1643		 returned by the macro, it will definitely be less than the
1644		 actual mode mask.  */
1645	      && ((GET_CODE (size) == CONST_INT
1646		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
1647		       <= GET_MODE_MASK (mode)))
1648		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1649	      && (insn_operand_predicate[(int) code][0] == 0
1650		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1651	      && (insn_operand_predicate[(int) code][1] == 0
1652		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1653	      && (insn_operand_predicate[(int) code][3] == 0
1654		  || (*insn_operand_predicate[(int) code][3]) (opalign,
1655							       VOIDmode)))
1656	    {
1657	      rtx op2;
1658	      rtx last = get_last_insn ();
1659	      rtx pat;
1660
1661	      op2 = convert_to_mode (mode, size, 1);
1662	      if (insn_operand_predicate[(int) code][2] != 0
1663		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1664		op2 = copy_to_mode_reg (mode, op2);
1665
1666	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1667	      if (pat)
1668		{
1669		  emit_insn (pat);
1670		  return;
1671		}
1672	      else
1673		delete_insns_since (last);
1674	    }
1675	}
1676
1677#ifdef TARGET_MEM_FUNCTIONS
1678      emit_library_call (memcpy_libfunc, 0,
1679			 VOIDmode, 3, XEXP (x, 0), Pmode,
1680			 XEXP (y, 0), Pmode,
1681			 convert_to_mode (TYPE_MODE (sizetype), size,
1682					  TREE_UNSIGNED (sizetype)),
1683			 TYPE_MODE (sizetype));
1684#else
1685      emit_library_call (bcopy_libfunc, 0,
1686			 VOIDmode, 3, XEXP (y, 0), Pmode,
1687			 XEXP (x, 0), Pmode,
1688			 convert_to_mode (TYPE_MODE (sizetype), size,
1689					  TREE_UNSIGNED (sizetype)),
1690			 TYPE_MODE (sizetype));
1691#endif
1692    }
1693}
1694
1695/* Copy all or part of a value X into registers starting at REGNO.
1696   The number of registers to be filled is NREGS.  */
1697
1698void
1699move_block_to_reg (regno, x, nregs, mode)
1700     int regno;
1701     rtx x;
1702     int nregs;
1703     enum machine_mode mode;
1704{
1705  int i;
1706  rtx pat, last;
1707
1708  if (nregs == 0)
1709    return;
1710
1711  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1712    x = validize_mem (force_const_mem (mode, x));
1713
1714  /* See if the machine can do this with a load multiple insn.  */
1715#ifdef HAVE_load_multiple
1716  if (HAVE_load_multiple)
1717    {
1718      last = get_last_insn ();
1719      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1720			       GEN_INT (nregs));
1721      if (pat)
1722	{
1723	  emit_insn (pat);
1724	  return;
1725	}
1726      else
1727	delete_insns_since (last);
1728    }
1729#endif
1730
1731  for (i = 0; i < nregs; i++)
1732    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1733		    operand_subword_force (x, i, mode));
1734}
1735
1736/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1737   The number of registers to be filled is NREGS.  SIZE indicates the number
1738   of bytes in the object X.  */
1739
1740
1741void
1742move_block_from_reg (regno, x, nregs, size)
1743     int regno;
1744     rtx x;
1745     int nregs;
1746     int size;
1747{
1748  int i;
1749  rtx pat, last;
1750
1751  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1752     to the left before storing to memory.  */
1753  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1754    {
1755      rtx tem = operand_subword (x, 0, 1, BLKmode);
1756      rtx shift;
1757
1758      if (tem == 0)
1759	abort ();
1760
1761      shift = expand_shift (LSHIFT_EXPR, word_mode,
1762			    gen_rtx (REG, word_mode, regno),
1763			    build_int_2 ((UNITS_PER_WORD - size)
1764					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1765      emit_move_insn (tem, shift);
1766      return;
1767    }
1768
1769  /* See if the machine can do this with a store multiple insn.  */
1770#ifdef HAVE_store_multiple
1771  if (HAVE_store_multiple)
1772    {
1773      last = get_last_insn ();
1774      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1775				GEN_INT (nregs));
1776      if (pat)
1777	{
1778	  emit_insn (pat);
1779	  return;
1780	}
1781      else
1782	delete_insns_since (last);
1783    }
1784#endif
1785
1786  for (i = 0; i < nregs; i++)
1787    {
1788      rtx tem = operand_subword (x, i, 1, BLKmode);
1789
1790      if (tem == 0)
1791	abort ();
1792
1793      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1794    }
1795}
1796
1797/* Add a USE expression for REG to the (possibly empty) list pointed
1798   to by CALL_FUSAGE.  REG must denote a hard register.  */
1799
1800void
1801use_reg (call_fusage, reg)
1802     rtx *call_fusage, reg;
1803{
1804  if (GET_CODE (reg) != REG
1805      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1806    abort();
1807
1808  *call_fusage
1809    = gen_rtx (EXPR_LIST, VOIDmode,
1810	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
1811}
1812
1813/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1814   starting at REGNO.  All of these registers must be hard registers.  */
1815
1816void
1817use_regs (call_fusage, regno, nregs)
1818     rtx *call_fusage;
1819     int regno;
1820     int nregs;
1821{
1822  int i;
1823
1824  if (regno + nregs > FIRST_PSEUDO_REGISTER)
1825    abort ();
1826
1827  for (i = 0; i < nregs; i++)
1828    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1829}
1830
1831/* Write zeros through the storage of OBJECT.
1832   If OBJECT has BLKmode, SIZE is its length in bytes.  */
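/* A minimal sketch: clearing a 32-byte BLKmode object OBJ amounts to

       clear_storage (obj, GEN_INT (32));

   which turns into a memset (or bzero) library call; a non-BLKmode
   object is simply assigned const0_rtx.  */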
1833
1834void
1835clear_storage (object, size)
1836     rtx object;
1837     rtx size;
1838{
1839  if (GET_MODE (object) == BLKmode)
1840    {
1841#ifdef TARGET_MEM_FUNCTIONS
1842      emit_library_call (memset_libfunc, 0,
1843			 VOIDmode, 3,
1844			 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
1845			 convert_to_mode (TYPE_MODE (sizetype),
1846					  size, TREE_UNSIGNED (sizetype)),
1847			 TYPE_MODE (sizetype));
1848#else
1849      emit_library_call (bzero_libfunc, 0,
1850			 VOIDmode, 2,
1851			 XEXP (object, 0), Pmode,
1852			 convert_to_mode (TYPE_MODE (sizetype),
1853					  size, TREE_UNSIGNED (sizetype)),
1854			 TYPE_MODE (sizetype));
1855#endif
1856    }
1857  else
1858    emit_move_insn (object, const0_rtx);
1859}
1860
1861/* Generate code to copy Y into X.
1862   Both Y and X must have the same mode, except that
1863   Y can be a constant with VOIDmode.
1864   This mode cannot be BLKmode; use emit_block_move for that.
1865
1866   Return the last instruction emitted.  */
1867
1868rtx
1869emit_move_insn (x, y)
1870     rtx x, y;
1871{
1872  enum machine_mode mode = GET_MODE (x);
1873
1874  x = protect_from_queue (x, 1);
1875  y = protect_from_queue (y, 0);
1876
1877  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1878    abort ();
1879
1880  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1881    y = force_const_mem (mode, y);
1882
1883  /* If X or Y are memory references, verify that their addresses are valid
1884     for the machine.  */
1885  if (GET_CODE (x) == MEM
1886      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1887	   && ! push_operand (x, GET_MODE (x)))
1888	  || (flag_force_addr
1889	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1890    x = change_address (x, VOIDmode, XEXP (x, 0));
1891
1892  if (GET_CODE (y) == MEM
1893      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1894	  || (flag_force_addr
1895	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1896    y = change_address (y, VOIDmode, XEXP (y, 0));
1897
1898  if (mode == BLKmode)
1899    abort ();
1900
1901  return emit_move_insn_1 (x, y);
1902}
1903
1904/* Low level part of emit_move_insn.
1905   Called just like emit_move_insn, but assumes X and Y
1906   are basically valid.  */
1907
1908rtx
1909emit_move_insn_1 (x, y)
1910     rtx x, y;
1911{
1912  enum machine_mode mode = GET_MODE (x);
1913  enum machine_mode submode;
1914  enum mode_class class = GET_MODE_CLASS (mode);
1915  int i;
1916
1917  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1918    return
1919      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1920
1921  /* Expand complex moves by moving real part and imag part, if possible.  */
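  /* E.g. (illustrative): an SCmode move on a machine with no movsc
     pattern is emitted as two SFmode moves, one for the real part
     and one for the imaginary part.  */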
1922  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1923	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1924						    * BITS_PER_UNIT),
1925						   (class == MODE_COMPLEX_INT
1926						    ? MODE_INT : MODE_FLOAT),
1927						   0))
1928	   && (mov_optab->handlers[(int) submode].insn_code
1929	       != CODE_FOR_nothing))
1930    {
1931      /* Don't split destination if it is a stack push.  */
1932      int stack = push_operand (x, GET_MODE (x));
1933      rtx insns;
1934
      /* If this is a stack push, push the high part first so the
	 parts end up in argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
1940      if (stack)
1941	{
1942	  /* Note that the real part always precedes the imag part in memory
1943	     regardless of machine's endianness.  */
1944#ifdef STACK_GROWS_DOWNWARD
1945	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1946		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
1947		      gen_imagpart (submode, y)));
1948	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1949		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
1950		      gen_realpart (submode, y)));
1951#else
1952	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1953		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
1954		      gen_realpart (submode, y)));
1955	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1956		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
1957		      gen_imagpart (submode, y)));
1958#endif
1959	}
1960      else
1961	{
1962	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1963		     (gen_realpart (submode, x), gen_realpart (submode, y)));
1964	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1965		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
1966	}
1967
1968      return get_last_insn ();
1969    }
1970
1971  /* This will handle any multi-word mode that lacks a move_insn pattern.
1972     However, you will get better code if you define such patterns,
1973     even if they must turn into multiple assembler instructions.  */
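  /* E.g. (illustrative): a DImode move on a 32-bit machine with no
     movdi pattern falls through to the loop below and is emitted as
     two word-sized moves, one per 32-bit half.  */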
1974  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1975    {
1976      rtx last_insn = 0;
1977      rtx insns;
1978
1979#ifdef PUSH_ROUNDING
1980
1981      /* If X is a push on the stack, do the push now and replace
1982	 X with a reference to the stack pointer.  */
1983      if (push_operand (x, GET_MODE (x)))
1984	{
1985	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
1986	  x = change_address (x, VOIDmode, stack_pointer_rtx);
1987	}
1988#endif
1989
1990      /* Show the output dies here.  */
1991      emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1992
1993      for (i = 0;
1994	   i < (GET_MODE_SIZE (mode)  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1995	   i++)
1996	{
1997	  rtx xpart = operand_subword (x, i, 1, mode);
1998	  rtx ypart = operand_subword (y, i, 1, mode);
1999
2000	  /* If we can't get a part of Y, put Y into memory if it is a
2001	     constant.  Otherwise, force it into a register.  If we still
2002	     can't get a part of Y, abort.  */
2003	  if (ypart == 0 && CONSTANT_P (y))
2004	    {
2005	      y = force_const_mem (mode, y);
2006	      ypart = operand_subword (y, i, 1, mode);
2007	    }
2008	  else if (ypart == 0)
2009	    ypart = operand_subword_force (y, i, mode);
2010
2011	  if (xpart == 0 || ypart == 0)
2012	    abort ();
2013
2014	  last_insn = emit_move_insn (xpart, ypart);
2015	}
2016
2017      return last_insn;
2018    }
2019  else
2020    abort ();
2021}
2022
2023/* Pushing data onto the stack.  */
2024
2025/* Push a block of length SIZE (perhaps variable)
2026   and return an rtx to address the beginning of the block.
2027   Note that it is not possible for the value returned to be a QUEUED.
2028   The value may be virtual_outgoing_args_rtx.
2029
2030   EXTRA is the number of bytes of padding to push in addition to SIZE.
2031   BELOW nonzero means this padding comes at low addresses;
2032   otherwise, the padding comes at high addresses.  */
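/* A worked example (hypothetical sizes, stack growing downward):
   push_block (GEN_INT (16), 4, 0) adjusts the stack by 16 + 4 = 20
   bytes and returns the address of the 16-byte block; since BELOW is
   zero, the 4 bytes of padding lie at the high-address side of the
   block rather than before it.  */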
2033
2034rtx
2035push_block (size, extra, below)
2036     rtx size;
2037     int extra, below;
2038{
2039  register rtx temp;
2040
2041  size = convert_modes (Pmode, ptr_mode, size, 1);
2042  if (CONSTANT_P (size))
2043    anti_adjust_stack (plus_constant (size, extra));
2044  else if (GET_CODE (size) == REG && extra == 0)
2045    anti_adjust_stack (size);
2046  else
2047    {
2048      rtx temp = copy_to_mode_reg (Pmode, size);
2049      if (extra != 0)
2050	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2051			     temp, 0, OPTAB_LIB_WIDEN);
2052      anti_adjust_stack (temp);
2053    }
2054
2055#ifdef STACK_GROWS_DOWNWARD
2056  temp = virtual_outgoing_args_rtx;
2057  if (extra != 0 && below)
2058    temp = plus_constant (temp, extra);
2059#else
2060  if (GET_CODE (size) == CONST_INT)
2061    temp = plus_constant (virtual_outgoing_args_rtx,
2062			  - INTVAL (size) - (below ? 0 : extra));
2063  else if (extra != 0 && !below)
2064    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2065		    negate_rtx (Pmode, plus_constant (size, extra)));
2066  else
2067    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2068		    negate_rtx (Pmode, size));
2069#endif
2070
2071  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2072}
2073
2074rtx
2075gen_push_operand ()
2076{
2077  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2078}
2079
2080/* Generate code to push X onto the stack, assuming it has mode MODE and
2081   type TYPE.
2082   MODE is redundant except when X is a CONST_INT (since they don't
2083   carry mode info).
2084   SIZE is an rtx for the size of data to be copied (in bytes),
2085   needed only if X is BLKmode.
2086
2087   ALIGN (in bytes) is maximum alignment we can assume.
2088
2089   If PARTIAL and REG are both nonzero, then copy that many of the first
2090   words of X into registers starting with REG, and push the rest of X.
2091   The amount of space pushed is decreased by PARTIAL words,
2092   rounded *down* to a multiple of PARM_BOUNDARY.
2093   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
2097
2098   EXTRA is the amount in bytes of extra space to leave next to this arg.
2099   This is ignored if an argument block has already been allocated.
2100
2101   On a machine that lacks real push insns, ARGS_ADDR is the address of
2102   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
2105
2106   ARGS_SO_FAR is the size of args previously pushed for this call.  */
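/* Illustrative use: for a 4-word argument whose first 2 words travel
   in registers starting at REG, the caller passes PARTIAL == 2; the
   stack space pushed is then reduced by those 2 words (rounded down
   to a PARM_BOUNDARY multiple), and the register part is loaded by
   move_block_to_reg at the end of this function.  */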
2107
2108void
2109emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2110		args_addr, args_so_far)
2111     register rtx x;
2112     enum machine_mode mode;
2113     tree type;
2114     rtx size;
2115     int align;
2116     int partial;
2117     rtx reg;
2118     int extra;
2119     rtx args_addr;
2120     rtx args_so_far;
2121{
2122  rtx xinner;
2123  enum direction stack_direction
2124#ifdef STACK_GROWS_DOWNWARD
2125    = downward;
2126#else
2127    = upward;
2128#endif
2129
2130  /* Decide where to pad the argument: `downward' for below,
2131     `upward' for above, or `none' for don't pad it.
2132     Default is below for small data on big-endian machines; else above.  */
2133  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2134
2135  /* Invert direction if stack is post-update.  */
2136  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2137    if (where_pad != none)
2138      where_pad = (where_pad == downward ? upward : downward);
2139
2140  xinner = x = protect_from_queue (x, 0);
2141
2142  if (mode == BLKmode)
2143    {
2144      /* Copy a block into the stack, entirely or partially.  */
2145
2146      register rtx temp;
2147      int used = partial * UNITS_PER_WORD;
2148      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2149      int skip;
2150
2151      if (size == 0)
2152	abort ();
2153
2154      used -= offset;
2155
2156      /* USED is now the # of bytes we need not copy to the stack
2157	 because registers will take care of them.  */
2158
2159      if (partial != 0)
2160	xinner = change_address (xinner, BLKmode,
2161				 plus_constant (XEXP (xinner, 0), used));
2162
2163      /* If the partial register-part of the arg counts in its stack size,
2164	 skip the part of stack space corresponding to the registers.
2165	 Otherwise, start copying to the beginning of the stack space,
2166	 by setting SKIP to 0.  */
2167#ifndef REG_PARM_STACK_SPACE
2168      skip = 0;
2169#else
2170      skip = used;
2171#endif
2172
2173#ifdef PUSH_ROUNDING
2174      /* Do it with several push insns if that doesn't take lots of insns
2175	 and if there is no difficulty with push insns that skip bytes
2176	 on the stack for alignment purposes.  */
2177      if (args_addr == 0
2178	  && GET_CODE (size) == CONST_INT
2179	  && skip == 0
2180	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2181	      < MOVE_RATIO)
2182	  /* Here we avoid the case of a structure whose weak alignment
2183	     forces many pushes of a small amount of data,
2184	     and such small pushes do rounding that causes trouble.  */
2185	  && ((! SLOW_UNALIGNED_ACCESS)
2186	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2187	      || PUSH_ROUNDING (align) == align)
2188	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2189	{
2190	  /* Push padding now if padding above and stack grows down,
2191	     or if padding below and stack grows up.
2192	     But if space already allocated, this has already been done.  */
2193	  if (extra && args_addr == 0
2194	      && where_pad != none && where_pad != stack_direction)
2195	    anti_adjust_stack (GEN_INT (extra));
2196
2197	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2198			  INTVAL (size) - used, align);
2199	}
2200      else
2201#endif /* PUSH_ROUNDING */
2202	{
2203	  /* Otherwise make space on the stack and copy the data
2204	     to the address of that space.  */
2205
2206	  /* Deduct words put into registers from the size we must copy.  */
2207	  if (partial != 0)
2208	    {
2209	      if (GET_CODE (size) == CONST_INT)
2210		size = GEN_INT (INTVAL (size) - used);
2211	      else
2212		size = expand_binop (GET_MODE (size), sub_optab, size,
2213				     GEN_INT (used), NULL_RTX, 0,
2214				     OPTAB_LIB_WIDEN);
2215	    }
2216
2217	  /* Get the address of the stack space.
2218	     In this case, we do not deal with EXTRA separately.
2219	     A single stack adjust will do.  */
2220	  if (! args_addr)
2221	    {
2222	      temp = push_block (size, extra, where_pad == downward);
2223	      extra = 0;
2224	    }
2225	  else if (GET_CODE (args_so_far) == CONST_INT)
2226	    temp = memory_address (BLKmode,
2227				   plus_constant (args_addr,
2228						  skip + INTVAL (args_so_far)));
2229	  else
2230	    temp = memory_address (BLKmode,
2231				   plus_constant (gen_rtx (PLUS, Pmode,
2232							   args_addr, args_so_far),
2233						  skip));
2234
2235	  /* TEMP is the address of the block.  Copy the data there.  */
2236	  if (GET_CODE (size) == CONST_INT
2237	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2238		  < MOVE_RATIO))
2239	    {
2240	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2241			      INTVAL (size), align);
2242	      goto ret;
2243	    }
2244	  /* Try the most limited insn first, because there's no point
2245	     including more than one in the machine description unless
2246	     the more limited one has some advantage.  */
2247#ifdef HAVE_movstrqi
2248	  if (HAVE_movstrqi
2249	      && GET_CODE (size) == CONST_INT
2250	      && ((unsigned) INTVAL (size)
2251		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2252	    {
2253	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2254				      xinner, size, GEN_INT (align));
2255	      if (pat != 0)
2256		{
2257		  emit_insn (pat);
2258		  goto ret;
2259		}
2260	    }
2261#endif
2262#ifdef HAVE_movstrhi
2263	  if (HAVE_movstrhi
2264	      && GET_CODE (size) == CONST_INT
2265	      && ((unsigned) INTVAL (size)
2266		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2267	    {
2268	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2269				      xinner, size, GEN_INT (align));
2270	      if (pat != 0)
2271		{
2272		  emit_insn (pat);
2273		  goto ret;
2274		}
2275	    }
2276#endif
2277#ifdef HAVE_movstrsi
2278	  if (HAVE_movstrsi)
2279	    {
2280	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2281				      xinner, size, GEN_INT (align));
2282	      if (pat != 0)
2283		{
2284		  emit_insn (pat);
2285		  goto ret;
2286		}
2287	    }
2288#endif
2289#ifdef HAVE_movstrdi
2290	  if (HAVE_movstrdi)
2291	    {
2292	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2293				      xinner, size, GEN_INT (align));
2294	      if (pat != 0)
2295		{
2296		  emit_insn (pat);
2297		  goto ret;
2298		}
2299	    }
2300#endif
2301
2302#ifndef ACCUMULATE_OUTGOING_ARGS
2303	  /* If the source is referenced relative to the stack pointer,
2304	     copy it to another register to stabilize it.  We do not need
2305	     to do this if we know that we won't be changing sp.  */
2306
2307	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2308	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2309	    temp = copy_to_reg (temp);
2310#endif
2311
2312	  /* Make inhibit_defer_pop nonzero around the library call
2313	     to force it to pop the bcopy-arguments right away.  */
2314	  NO_DEFER_POP;
2315#ifdef TARGET_MEM_FUNCTIONS
2316	  emit_library_call (memcpy_libfunc, 0,
2317			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2318			     convert_to_mode (TYPE_MODE (sizetype),
2319					      size, TREE_UNSIGNED (sizetype)),
2320			     TYPE_MODE (sizetype));
2321#else
2322	  emit_library_call (bcopy_libfunc, 0,
2323			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2324			     convert_to_mode (TYPE_MODE (sizetype),
2325					      size, TREE_UNSIGNED (sizetype)),
2326			     TYPE_MODE (sizetype));
2327#endif
2328	  OK_DEFER_POP;
2329	}
2330    }
2331  else if (partial > 0)
2332    {
2333      /* Scalar partly in registers.  */
2334
2335      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2336      int i;
2337      int not_stack;
2338      /* # words of start of argument
2339	 that we must make space for but need not store.  */
2340      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2341      int args_offset = INTVAL (args_so_far);
2342      int skip;
2343
2344      /* Push padding now if padding above and stack grows down,
2345	 or if padding below and stack grows up.
2346	 But if space already allocated, this has already been done.  */
2347      if (extra && args_addr == 0
2348	  && where_pad != none && where_pad != stack_direction)
2349	anti_adjust_stack (GEN_INT (extra));
2350
2351      /* If we make space by pushing it, we might as well push
2352	 the real data.  Otherwise, we can leave OFFSET nonzero
2353	 and leave the space uninitialized.  */
2354      if (args_addr == 0)
2355	offset = 0;
2356
2357      /* Now NOT_STACK gets the number of words that we don't need to
2358	 allocate on the stack.  */
2359      not_stack = partial - offset;
2360
2361      /* If the partial register-part of the arg counts in its stack size,
2362	 skip the part of stack space corresponding to the registers.
2363	 Otherwise, start copying to the beginning of the stack space,
2364	 by setting SKIP to 0.  */
2365#ifndef REG_PARM_STACK_SPACE
2366      skip = 0;
2367#else
2368      skip = not_stack;
2369#endif
2370
2371      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2372	x = validize_mem (force_const_mem (mode, x));
2373
2374      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2375	 SUBREGs of such registers are not allowed.  */
2376      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2377	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2378	x = copy_to_reg (x);
2379
2380      /* Loop over all the words allocated on the stack for this arg.  */
2381      /* We can do it by words, because any scalar bigger than a word
2382	 has a size a multiple of a word.  */
2383#ifndef PUSH_ARGS_REVERSED
2384      for (i = not_stack; i < size; i++)
2385#else
2386      for (i = size - 1; i >= not_stack; i--)
2387#endif
2388	if (i >= not_stack + offset)
2389	  emit_push_insn (operand_subword_force (x, i, mode),
2390			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2391			  0, args_addr,
2392			  GEN_INT (args_offset + ((i - not_stack + skip)
2393						  * UNITS_PER_WORD)));
2394    }
2395  else
2396    {
2397      rtx addr;
2398
2399      /* Push padding now if padding above and stack grows down,
2400	 or if padding below and stack grows up.
2401	 But if space already allocated, this has already been done.  */
2402      if (extra && args_addr == 0
2403	  && where_pad != none && where_pad != stack_direction)
2404	anti_adjust_stack (GEN_INT (extra));
2405
2406#ifdef PUSH_ROUNDING
2407      if (args_addr == 0)
2408	addr = gen_push_operand ();
2409      else
2410#endif
2411	if (GET_CODE (args_so_far) == CONST_INT)
2412	  addr
2413	    = memory_address (mode,
2414			      plus_constant (args_addr, INTVAL (args_so_far)));
2415      else
2416	addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2417					      args_so_far));
2418
2419      emit_move_insn (gen_rtx (MEM, mode, addr), x);
2420    }
2421
2422 ret:
2423  /* If part should go in registers, copy that part
2424     into the appropriate registers.  Do this now, at the end,
2425     since mem-to-mem copies above may do function calls.  */
2426  if (partial > 0 && reg != 0)
2427    move_block_to_reg (REGNO (reg), x, partial, mode);
2428
2429  if (extra && args_addr == 0 && where_pad == stack_direction)
2430    anti_adjust_stack (GEN_INT (extra));
2431}
2432
2433/* Expand an assignment that stores the value of FROM into TO.
2434   If WANT_VALUE is nonzero, return an rtx for the value of TO.
2435   (This may contain a QUEUED rtx;
2436   if the value is constant, this rtx is a constant.)
2437   Otherwise, the returned value is NULL_RTX.
2438
2439   SUGGEST_REG is no longer actually used.
   It used to mean: copy the value through a register
   and return that register, if that is possible.
2442   We now use WANT_VALUE to decide whether to do this.  */
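/* For an assignment used as a C statement, such as `s.f = x;', the
   front end typically calls expand_assignment (to, from, 0, 0); the
   COMPONENT_REF case below then locates the field with
   get_inner_reference and stores through store_field.  */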
2443
2444rtx
2445expand_assignment (to, from, want_value, suggest_reg)
2446     tree to, from;
2447     int want_value;
2448     int suggest_reg;
2449{
2450  register rtx to_rtx = 0;
2451  rtx result;
2452
2453  /* Don't crash if the lhs of the assignment was erroneous.  */
2454
2455  if (TREE_CODE (to) == ERROR_MARK)
2456    {
2457      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2458      return want_value ? result : NULL_RTX;
2459    }
2460
2461  if (output_bytecode)
2462    {
2463      tree dest_innermost;
2464
2465      bc_expand_expr (from);
2466      bc_emit_instruction (duplicate);
2467
2468      dest_innermost = bc_expand_address (to);
2469
2470      /* Can't deduce from TYPE that we're dealing with a bitfield, so
2471	 take care of it here. */
2472
2473      bc_store_memory (TREE_TYPE (to), dest_innermost);
2474      return NULL;
2475    }
2476
2477  /* Assignment of a structure component needs special treatment
2478     if the structure component's rtx is not simply a MEM.
2479     Assignment of an array element at a constant index, and assignment of
2480     an array element in an unaligned packed structure field, has the same
2481     problem.  */
2482
2483  if (TREE_CODE (to) == COMPONENT_REF
2484      || TREE_CODE (to) == BIT_FIELD_REF
2485      || (TREE_CODE (to) == ARRAY_REF
2486	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2487	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2488	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2489    {
2490      enum machine_mode mode1;
2491      int bitsize;
2492      int bitpos;
2493      tree offset;
2494      int unsignedp;
2495      int volatilep = 0;
2496      tree tem;
2497      int alignment;
2498
2499      push_temp_slots ();
2500      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2501				      &mode1, &unsignedp, &volatilep);
2502
2503      /* If we are going to use store_bit_field and extract_bit_field,
2504	 make sure to_rtx will be safe for multiple use.  */
2505
2506      if (mode1 == VOIDmode && want_value)
2507	tem = stabilize_reference (tem);
2508
2509      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2510      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2511      if (offset != 0)
2512	{
2513	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2514
2515	  if (GET_CODE (to_rtx) != MEM)
2516	    abort ();
2517	  to_rtx = change_address (to_rtx, VOIDmode,
2518				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2519					    force_reg (ptr_mode, offset_rtx)));
2520	  /* If we have a variable offset, the known alignment
2521	     is only that of the innermost structure containing the field.
2522	     (Actually, we could sometimes do better by using the
2523	     align of an element of the innermost array, but no need.)  */
2524	  if (TREE_CODE (to) == COMPONENT_REF
2525	      || TREE_CODE (to) == BIT_FIELD_REF)
2526	    alignment
2527	      = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2528	}
2529      if (volatilep)
2530	{
2531	  if (GET_CODE (to_rtx) == MEM)
2532	    {
2533	      /* When the offset is zero, to_rtx is the address of the
2534		 structure we are storing into, and hence may be shared.
2535		 We must make a new MEM before setting the volatile bit.  */
2536	      if (offset == 0)
2537		to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2538	      MEM_VOLATILE_P (to_rtx) = 1;
2539	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort here.  */
2543	  else
2544	    abort ();
2545#endif
2546	}
2547
2548      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2549			    (want_value
2550			     /* Spurious cast makes HPUX compiler happy.  */
2551			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2552			     : VOIDmode),
2553			    unsignedp,
2554			    /* Required alignment of containing datum.  */
2555			    alignment,
2556			    int_size_in_bytes (TREE_TYPE (tem)));
2557      preserve_temp_slots (result);
2558      free_temp_slots ();
2559      pop_temp_slots ();
2560
2561      /* If the value is meaningful, convert RESULT to the proper mode.
2562	 Otherwise, return nothing.  */
2563      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2564					  TYPE_MODE (TREE_TYPE (from)),
2565					  result,
2566					  TREE_UNSIGNED (TREE_TYPE (to)))
2567	      : NULL_RTX);
2568    }
2569
2570  /* If the rhs is a function call and its value is not an aggregate,
2571     call the function before we start to compute the lhs.
2572     This is needed for correct code for cases such as
2573     val = setjmp (buf) on machines where reference to val
2574     requires loading up part of an address in a separate insn.
2575
     Don't do this if TO is a VAR_DECL whose DECL_RTL is a REG, since it might
     be a promoted variable where the zero- or sign-extension needs to be done.
2578     Handling this in the normal way is safe because no computation is done
2579     before the call.  */
2580  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2581      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2582    {
2583      rtx value;
2584
2585      push_temp_slots ();
2586      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2587      if (to_rtx == 0)
2588	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2589
2590      if (GET_MODE (to_rtx) == BLKmode)
2591	{
2592	  int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2593	  emit_block_move (to_rtx, value, expr_size (from), align);
2594	}
2595      else
2596	emit_move_insn (to_rtx, value);
2597      preserve_temp_slots (to_rtx);
2598      free_temp_slots ();
2599      pop_temp_slots ();
2600      return want_value ? to_rtx : NULL_RTX;
2601    }
2602
2603  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
2604     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
2605
2606  if (to_rtx == 0)
2607    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2608
2609  /* Don't move directly into a return register.  */
2610  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2611    {
2612      rtx temp;
2613
2614      push_temp_slots ();
2615      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2616      emit_move_insn (to_rtx, temp);
2617      preserve_temp_slots (to_rtx);
2618      free_temp_slots ();
2619      pop_temp_slots ();
2620      return want_value ? to_rtx : NULL_RTX;
2621    }
2622
2623  /* In case we are returning the contents of an object which overlaps
2624     the place the value is being stored, use a safe function when copying
2625     a value through a pointer into a structure value return block.  */
2626  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2627      && current_function_returns_struct
2628      && !current_function_returns_pcc_struct)
2629    {
2630      rtx from_rtx, size;
2631
2632      push_temp_slots ();
2633      size = expr_size (from);
2634      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2635
2636#ifdef TARGET_MEM_FUNCTIONS
2637      emit_library_call (memcpy_libfunc, 0,
2638			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2639			 XEXP (from_rtx, 0), Pmode,
2640			 convert_to_mode (TYPE_MODE (sizetype),
2641					  size, TREE_UNSIGNED (sizetype)),
2642			 TYPE_MODE (sizetype));
2643#else
2644      emit_library_call (bcopy_libfunc, 0,
2645			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2646			 XEXP (to_rtx, 0), Pmode,
2647			 convert_to_mode (TYPE_MODE (sizetype),
2648					  size, TREE_UNSIGNED (sizetype)),
2649			 TYPE_MODE (sizetype));
2650#endif
2651
2652      preserve_temp_slots (to_rtx);
2653      free_temp_slots ();
2654      pop_temp_slots ();
2655      return want_value ? to_rtx : NULL_RTX;
2656    }
2657
2658  /* Compute FROM and store the value in the rtx we got.  */
2659
2660  push_temp_slots ();
2661  result = store_expr (from, to_rtx, want_value);
2662  preserve_temp_slots (result);
2663  free_temp_slots ();
2664  pop_temp_slots ();
2665  return want_value ? result : NULL_RTX;
2666}
2667
2668/* Generate code for computing expression EXP,
2669   and storing the value into TARGET.
2670   TARGET may contain a QUEUED rtx.
2671
2672   If WANT_VALUE is nonzero, return a copy of the value
2673   not in TARGET, so that we can be sure to use the proper
2674   value in a containing expression even if TARGET has something
2675   else stored in it.  If possible, we copy the value through a pseudo
2676   and return that pseudo.  Or, if the value is constant, we try to
2677   return the constant.  In some cases, we return a pseudo
2678   copied *from* TARGET.
2679
   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode this doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   and no intervening sequence point.  Will other languages need
   this to be more thorough?
2686
2687   If WANT_VALUE is 0, we return NULL, to make sure
2688   to catch quickly any cases where the caller uses the value
2689   and fails to set WANT_VALUE.  */
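/* E.g. for `x = y = z;' the inner assignment is expanded with
   WANT_VALUE nonzero, so a safe copy of the stored value (usually a
   pseudo register, or the constant itself) is returned for use by
   the outer assignment.  */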
2690
2691rtx
2692store_expr (exp, target, want_value)
2693     register tree exp;
2694     register rtx target;
2695     int want_value;
2696{
2697  register rtx temp;
2698  int dont_return_target = 0;
2699
2700  if (TREE_CODE (exp) == COMPOUND_EXPR)
2701    {
2702      /* Perform first part of compound expression, then assign from second
2703	 part.  */
2704      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2705      emit_queue ();
2706      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2707    }
2708  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2709    {
2710      /* For conditional expression, get safe form of the target.  Then
2711	 test the condition, doing the appropriate assignment on either
2712	 side.  This avoids the creation of unnecessary temporaries.
2713	 For non-BLKmode, it is more efficient not to do this.  */
2714
2715      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2716
2717      emit_queue ();
2718      target = protect_from_queue (target, 1);
2719
2720      do_pending_stack_adjust ();
2721      NO_DEFER_POP;
2722      jumpifnot (TREE_OPERAND (exp, 0), lab1);
2723      store_expr (TREE_OPERAND (exp, 1), target, 0);
2724      emit_queue ();
2725      emit_jump_insn (gen_jump (lab2));
2726      emit_barrier ();
2727      emit_label (lab1);
2728      store_expr (TREE_OPERAND (exp, 2), target, 0);
2729      emit_queue ();
2730      emit_label (lab2);
2731      OK_DEFER_POP;
2732      return want_value ? target : NULL_RTX;
2733    }
2734  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2735	   && GET_MODE (target) != BLKmode)
2736    /* If target is in memory and caller wants value in a register instead,
2737       arrange that.  Pass TARGET as target for expand_expr so that,
2738       if EXP is another assignment, WANT_VALUE will be nonzero for it.
2739       We know expand_expr will not use the target in that case.
2740       Don't do this if TARGET is volatile because we are supposed
2741       to write it and then read it.  */
2742    {
2743      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2744			  GET_MODE (target), 0);
2745      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2746	temp = copy_to_reg (temp);
2747      dont_return_target = 1;
2748    }
2749  else if (queued_subexp_p (target))
2750    /* If target contains a postincrement, let's not risk
2751       using it as the place to generate the rhs.  */
2752    {
2753      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2754	{
2755	  /* Expand EXP into a new pseudo.  */
2756	  temp = gen_reg_rtx (GET_MODE (target));
2757	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
2758	}
2759      else
2760	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2761
2762      /* If target is volatile, ANSI requires accessing the value
2763	 *from* the target, if it is accessed.  So make that happen.
2764	 In no case return the target itself.  */
2765      if (! MEM_VOLATILE_P (target) && want_value)
2766	dont_return_target = 1;
2767    }
2768  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
2770       than the declared mode, compute the result into its declared mode
2771       and then convert to the wider mode.  Our value is the computed
2772       expression.  */
2773    {
2774      /* If we don't want a value, we can do the conversion inside EXP,
2775	 which will often result in some optimizations.  Do the conversion
2776	 in two steps: first change the signedness, if needed, then
2777	 the extend.  */
2778      if (! want_value)
2779	{
2780	  if (TREE_UNSIGNED (TREE_TYPE (exp))
2781	      != SUBREG_PROMOTED_UNSIGNED_P (target))
2782	    exp
2783	      = convert
2784		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2785					  TREE_TYPE (exp)),
2786		 exp);
2787
2788	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2789					SUBREG_PROMOTED_UNSIGNED_P (target)),
2790			 exp);
2791	}
2792
2793      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2794
2795      /* If TEMP is a volatile MEM and we want a result value, make
2796	 the access now so it gets done only once.  Likewise if
2797	 it contains TARGET.  */
2798      if (GET_CODE (temp) == MEM && want_value
2799	  && (MEM_VOLATILE_P (temp)
2800	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
2801	temp = copy_to_reg (temp);
2802
2803      /* If TEMP is a VOIDmode constant, use convert_modes to make
2804	 sure that we properly convert it.  */
2805      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2806	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2807			      TYPE_MODE (TREE_TYPE (exp)), temp,
2808			      SUBREG_PROMOTED_UNSIGNED_P (target));
2809
2810      convert_move (SUBREG_REG (target), temp,
2811		    SUBREG_PROMOTED_UNSIGNED_P (target));
2812      return want_value ? temp : NULL_RTX;
2813    }
2814  else
2815    {
2816      temp = expand_expr (exp, target, GET_MODE (target), 0);
2817      /* Return TARGET if it's a specified hardware register.
2818	 If TARGET is a volatile mem ref, either return TARGET
2819	 or return a reg copied *from* TARGET; ANSI requires this.
2820
2821	 Otherwise, if TEMP is not TARGET, return TEMP
2822	 if it is constant (for efficiency),
2823	 or if we really want the correct value.  */
2824      if (!(target && GET_CODE (target) == REG
2825	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
2826	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2827	  && temp != target
2828	  && (CONSTANT_P (temp) || want_value))
2829	dont_return_target = 1;
2830    }
2831
2832  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2833     the same as that of TARGET, adjust the constant.  This is needed, for
2834     example, in case it is a CONST_DOUBLE and we want only a word-sized
2835     value.  */
2836  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2837      && TREE_CODE (exp) != ERROR_MARK
2838      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2839    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2840			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2841
2842  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
2844
2845  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2846    {
2847      target = protect_from_queue (target, 1);
2848      if (GET_MODE (temp) != GET_MODE (target)
2849	  && GET_MODE (temp) != VOIDmode)
2850	{
2851	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2852	  if (dont_return_target)
2853	    {
2854	      /* In this case, we will return TEMP,
2855		 so make sure it has the proper mode.
2856		 But don't forget to store the value into TARGET.  */
2857	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2858	      emit_move_insn (target, temp);
2859	    }
2860	  else
2861	    convert_move (target, temp, unsignedp);
2862	}
2863
2864      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2865	{
2866	  /* Handle copying a string constant into an array.
2867	     The string constant may be shorter than the array.
2868	     So copy just the string's actual length, and clear the rest.  */
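	  /* A worked example: copying the 3-byte constant "hi"
	     (including its terminating null) into a char[8] array
	     copies 3 bytes and then clears the remaining 5 bytes
	     through the memset/bzero call below.  */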
2869	  rtx size;
2870	  rtx addr;
2871
2872	  /* Get the size of the data type of the string,
2873	     which is actually the size of the target.  */
2874	  size = expr_size (exp);
2875	  if (GET_CODE (size) == CONST_INT
2876	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
2877	    emit_block_move (target, temp, size,
2878			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2879	  else
2880	    {
2881	      /* Compute the size of the data to copy from the string.  */
2882	      tree copy_size
2883		= size_binop (MIN_EXPR,
2884			      make_tree (sizetype, size),
2885			      convert (sizetype,
2886				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2887	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2888					       VOIDmode, 0);
2889	      rtx label = 0;
2890
2891	      /* Copy that much.  */
2892	      emit_block_move (target, temp, copy_size_rtx,
2893			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2894
2895	      /* Figure out how much is left in TARGET that we have to clear.
2896		 Do all calculations in ptr_mode.  */
2897
2898	      addr = XEXP (target, 0);
2899	      addr = convert_modes (ptr_mode, Pmode, addr, 1);
2900
2901	      if (GET_CODE (copy_size_rtx) == CONST_INT)
2902		{
2903		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2904		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2905		}
2906	      else
2907		{
2908		  addr = force_reg (ptr_mode, addr);
2909		  addr = expand_binop (ptr_mode, add_optab, addr,
2910				       copy_size_rtx, NULL_RTX, 0,
2911				       OPTAB_LIB_WIDEN);
2912
2913		  size = expand_binop (ptr_mode, sub_optab, size,
2914				       copy_size_rtx, NULL_RTX, 0,
2915				       OPTAB_LIB_WIDEN);
2916
2917		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2918				 GET_MODE (size), 0, 0);
2919		  label = gen_label_rtx ();
2920		  emit_jump_insn (gen_blt (label));
2921		}
2922
2923	      if (size != const0_rtx)
2924		{
2925#ifdef TARGET_MEM_FUNCTIONS
2926		  emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2927				     Pmode, const0_rtx, Pmode, size, ptr_mode);
2928#else
2929		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2930				     addr, Pmode, size, ptr_mode);
2931#endif
2932		}
2933
2934	      if (label)
2935		emit_label (label);
2936	    }
2937	}
2938      else if (GET_MODE (temp) == BLKmode)
2939	emit_block_move (target, temp, expr_size (exp),
2940			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2941      else
2942	emit_move_insn (target, temp);
2943    }
2944
2945  /* If we don't want a value, return NULL_RTX.  */
2946  if (! want_value)
2947    return NULL_RTX;
2948
2949  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2950     ??? The latter test doesn't seem to make sense.  */
2951  else if (dont_return_target && GET_CODE (temp) != MEM)
2952    return temp;
2953
2954  /* Return TARGET itself if it is a hard register.  */
2955  else if (want_value && GET_MODE (target) != BLKmode
2956	   && ! (GET_CODE (target) == REG
2957		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2958    return copy_to_reg (target);
2959
2960  else
2961    return target;
2962}
2963
2964/* Store the value of constructor EXP into the rtx TARGET.
2965   TARGET is either a REG or a MEM.  */
2966
2967static void
2968store_constructor (exp, target)
2969     tree exp;
2970     rtx target;
2971{
2972  tree type = TREE_TYPE (exp);
2973
2974  /* We know our target cannot conflict, since safe_from_p has been called.  */
2975#if 0
2976  /* Don't try copying piece by piece into a hard register
2977     since that is vulnerable to being clobbered by EXP.
2978     Instead, construct in a pseudo register and then copy it all.  */
2979  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2980    {
2981      rtx temp = gen_reg_rtx (GET_MODE (target));
2982      store_constructor (exp, temp);
2983      emit_move_insn (target, temp);
2984      return;
2985    }
2986#endif
2987
2988  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2989      || TREE_CODE (type) == QUAL_UNION_TYPE)
2990    {
2991      register tree elt;
2992
2993      /* Inform later passes that the whole union value is dead.  */
2994      if (TREE_CODE (type) == UNION_TYPE
2995	  || TREE_CODE (type) == QUAL_UNION_TYPE)
2996	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2997
2998      /* If we are building a static constructor into a register,
2999	 set the initial value as zero so we can fold the value into
3000	 a constant.  But if more than one register is involved,
3001	 this probably loses.  */
3002      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3003	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3004	emit_move_insn (target, const0_rtx);
3005
3006      /* If the constructor has fewer fields than the structure,
3007	 clear the whole structure first.  */
3008      else if (list_length (CONSTRUCTOR_ELTS (exp))
3009	       != list_length (TYPE_FIELDS (type)))
3010	clear_storage (target, expr_size (exp));
3011      else
3012	/* Inform later passes that the old value is dead.  */
3013	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3014
3015      /* Store each element of the constructor into
3016	 the corresponding field of TARGET.  */
3017
3018      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3019	{
3020	  register tree field = TREE_PURPOSE (elt);
3021	  register enum machine_mode mode;
3022	  int bitsize;
3023	  int bitpos = 0;
3024	  int unsignedp;
3025	  tree pos, constant = 0, offset = 0;
3026	  rtx to_rtx = target;
3027
3028	  /* Just ignore missing fields.
3029	     We cleared the whole structure, above,
3030	     if any fields are missing.  */
3031	  if (field == 0)
3032	    continue;
3033
3034	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3035	  unsignedp = TREE_UNSIGNED (field);
3036	  mode = DECL_MODE (field);
3037	  if (DECL_BIT_FIELD (field))
3038	    mode = VOIDmode;
3039
3040	  pos = DECL_FIELD_BITPOS (field);
3041	  if (TREE_CODE (pos) == INTEGER_CST)
3042	    constant = pos;
3043	  else if (TREE_CODE (pos) == PLUS_EXPR
3044		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3045	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3046	  else
3047	    offset = pos;
3048
3049	  if (constant)
3050	    bitpos = TREE_INT_CST_LOW (constant);
3051
3052	  if (offset)
3053	    {
3054	      rtx offset_rtx;
3055
3056	      if (contains_placeholder_p (offset))
3057		offset = build (WITH_RECORD_EXPR, sizetype,
3058				offset, exp);
3059
3060	      offset = size_binop (FLOOR_DIV_EXPR, offset,
3061				   size_int (BITS_PER_UNIT));
3062
3063	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3064	      if (GET_CODE (to_rtx) != MEM)
3065		abort ();
3066
3067	      to_rtx
3068		= change_address (to_rtx, VOIDmode,
3069				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3070					   force_reg (ptr_mode, offset_rtx)));
3071	    }
3072
3073	  store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3074		       /* The alignment of TARGET is
3075			  at least what its type requires.  */
3076		       VOIDmode, 0,
3077		       TYPE_ALIGN (type) / BITS_PER_UNIT,
3078		       int_size_in_bytes (type));
3079	}
3080    }
3081  else if (TREE_CODE (type) == ARRAY_TYPE)
3082    {
3083      register tree elt;
3084      register int i;
3085      tree domain = TYPE_DOMAIN (type);
3086      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3087      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3088      tree elttype = TREE_TYPE (type);
3089
      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
3093
3094      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3095	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3096	clear_storage (target, expr_size (exp));
3097      else
3098	/* Inform later passes that the old value is dead.  */
3099	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3100
3101      /* Store each element of the constructor into
3102	 the corresponding element of TARGET, determined
3103	 by counting the elements.  */
3104      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3105	   elt;
3106	   elt = TREE_CHAIN (elt), i++)
3107	{
3108	  register enum machine_mode mode;
3109	  int bitsize;
3110	  int bitpos;
3111	  int unsignedp;
3112	  tree index = TREE_PURPOSE (elt);
3113	  rtx xtarget = target;
3114
3115	  mode = TYPE_MODE (elttype);
3116	  bitsize = GET_MODE_BITSIZE (mode);
3117	  unsignedp = TREE_UNSIGNED (elttype);
3118
3119	  if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3120	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3121	    {
3122	      rtx pos_rtx, addr, xtarget;
3123	      tree position;
3124
3125	      if (index == 0)
3126		index = size_int (i);
3127
3128	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3129				     size_int (BITS_PER_UNIT));
3130	      position = size_binop (MULT_EXPR, index, position);
3131	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3132	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3133	      xtarget = change_address (target, mode, addr);
3134	      store_expr (TREE_VALUE (elt), xtarget, 0);
3135	    }
3136	  else
3137	    {
3138	      if (index != 0)
3139		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3140			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3141	      else
3142		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3143
3144	      store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3145			   /* The alignment of TARGET is
3146			      at least what its type requires.  */
3147			   VOIDmode, 0,
3148			   TYPE_ALIGN (type) / BITS_PER_UNIT,
3149			   int_size_in_bytes (type));
3150	    }
3151	}
3152    }
  /* Set constructor assignments.  */
3154  else if (TREE_CODE (type) == SET_TYPE)
3155    {
3156      tree elt;
3157      rtx xtarget = XEXP (target, 0);
3158      int set_word_size = TYPE_ALIGN (type);
3159      int nbytes = int_size_in_bytes (type);
3160      tree non_const_elements;
3161      int need_to_clear_first;
3162      tree domain = TYPE_DOMAIN (type);
3163      tree domain_min, domain_max, bitlength;
3164
      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset) and then
	 set the bits we want.  */
3174
3175      /* Check for all zeros. */
3176      if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3177	{
3178	  clear_storage (target, expr_size (exp));
3179	  return;
3180	}
3181
3182      if (nbytes < 0)
3183	abort ();
3184
3185      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3186      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3187      bitlength = size_binop (PLUS_EXPR,
3188			      size_binop (MINUS_EXPR, domain_max, domain_min),
3189			      size_one_node);
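      /* E.g. for a Pascal-style `set of 3..10', DOMAIN_MIN is 3,
	 DOMAIN_MAX is 10, and BITLENGTH is 10 - 3 + 1 = 8 bits.  */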
3190
3191      /* Check for range all ones, or at most a single range.
3192       (This optimization is only a win for big sets.) */
3193      if (GET_MODE (target) == BLKmode && nbytes > 16
3194	  && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3195	{
3196	  need_to_clear_first = 1;
3197	  non_const_elements = CONSTRUCTOR_ELTS (exp);
3198	}
3199      else
3200	{
3201	  int nbits = nbytes * BITS_PER_UNIT;
3202	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3203	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
3205	  HOST_WIDE_INT word = 0;
3206	  int bit_pos = 0;
3207	  int ibit = 0;
3208	  int offset = 0;  /* In bytes from beginning of set. */
3209	  non_const_elements = get_set_constructor_bits (exp,
3210							 bit_buffer, nbits);
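	  /* Pack the constant bits into host words and store each
	     completed word.  E.g. with 8-bit set words, bits 0 and 3
	     set, and !BYTES_BIG_ENDIAN, the first DATUM stored below
	     is 1 | (1 << 3) == 0x09.  */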
3211	  for (;;)
3212	    {
3213	      if (bit_buffer[ibit])
3214		{
3215		  if (BYTES_BIG_ENDIAN)
3216		    word |= (1 << (set_word_size - 1 - bit_pos));
3217		  else
3218		    word |= 1 << bit_pos;
3219		}
3220	      bit_pos++;  ibit++;
3221	      if (bit_pos >= set_word_size || ibit == nbits)
3222		{
3223		  rtx datum = GEN_INT (word);
3224		  rtx to_rtx;
3225		  /* The assumption here is that it is safe to use XEXP if
3226		     the set is multi-word, but not if it's single-word. */
3227		  if (GET_CODE (target) == MEM)
3228		    to_rtx = change_address (target, mode,
3229					     plus_constant (XEXP (target, 0),
3230							    offset));
3231		  else if (offset == 0)
3232		    to_rtx = target;
3233		  else
3234		    abort ();
3235		  emit_move_insn (to_rtx, datum);
3236		  if (ibit == nbits)
3237		    break;
3238		  word = 0;
3239		  bit_pos = 0;
3240		  offset += set_word_size / BITS_PER_UNIT;
3241		}
3242	    }
3243	  need_to_clear_first = 0;
3244	}
3245
3246      for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3247	{
	  /* Start of range of element, or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit   = TREE_VALUE (elt);
3252	  HOST_WIDE_INT startb, endb;
3253	  rtx  bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3254
3255	  bitlength_rtx = expand_expr (bitlength,
3256			    NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3257
	  /* Handle a non-range tuple element like [ expr ].  */
3259	  if (startbit == NULL_TREE)
3260	    {
3261	      startbit = save_expr (endbit);
3262	      endbit = startbit;
3263	    }
3264	  startbit = convert (sizetype, startbit);
3265	  endbit = convert (sizetype, endbit);
3266	  if (! integer_zerop (domain_min))
3267	    {
3268	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3269	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3270	    }
3271	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3272				      EXPAND_CONST_ADDRESS);
3273	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3274				    EXPAND_CONST_ADDRESS);
3275
3276	  if (REG_P (target))
3277	    {
3278	      targetx = assign_stack_temp (GET_MODE (target),
3279					   GET_MODE_SIZE (GET_MODE (target)),
3280					   0);
3281	      emit_move_insn (targetx, target);
3282	    }
3283	  else if (GET_CODE (target) == MEM)
3284	    targetx = target;
3285	  else
3286	    abort ();
3287
3288#ifdef TARGET_MEM_FUNCTIONS
3289	  /* Optimization:  If startbit and endbit are
3290	     constants divisible by BITS_PER_UNIT,
3291	     call memset instead. */
3292	  if (TREE_CODE (startbit) == INTEGER_CST
3293	      && TREE_CODE (endbit) == INTEGER_CST
3294	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3295	      && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3296	    {
3298	      if (need_to_clear_first
3299		  && endb - startb != nbytes * BITS_PER_UNIT)
3300		clear_storage (target, expr_size (exp));
3301	      need_to_clear_first = 0;
3302	      emit_library_call (memset_libfunc, 0,
3303				 VOIDmode, 3,
3304				 plus_constant (XEXP (targetx, 0), startb),
3305				 Pmode,
3306				 constm1_rtx, Pmode,
3307				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3308				 Pmode);
3309	    }
3310	  else
3311#endif
3312	    {
3313	      if (need_to_clear_first)
3314		{
3315		  clear_storage (target, expr_size (exp));
3316		  need_to_clear_first = 0;
3317		}
3318	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3319				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3320				 bitlength_rtx, TYPE_MODE (sizetype),
3321				 startbit_rtx, TYPE_MODE (sizetype),
3322				 endbit_rtx, TYPE_MODE (sizetype));
3323	    }
3324	  if (REG_P (target))
3325	    emit_move_insn (target, targetx);
3326	}
3327    }
3328
3329  else
3330    abort ();
3331}
3332
3333/* Store the value of EXP (an expression tree)
3334   into a subfield of TARGET which has mode MODE and occupies
3335   BITSIZE bits, starting BITPOS bits from the start of TARGET.
3336   If MODE is VOIDmode, it means that we are storing into a bit-field.
3337
3338   If VALUE_MODE is VOIDmode, return nothing in particular.
3339   UNSIGNEDP is not used in this case.
3340
3341   Otherwise, return an rtx for the value stored.  This rtx
3342   has mode VALUE_MODE if that is convenient to do.
3343   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3344
3345   ALIGN is the alignment that TARGET is known to have, measured in bytes.
3346   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */
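/* Illustrative call: storing EXP into a bit-field `int f : 5;' that
   begins 3 bits into a word-aligned structure amounts to
   store_field (target, 5, 3, VOIDmode, exp, ...); since MODE is
   VOIDmode, the store is routed through store_bit_field below.  */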
3347
3348static rtx
3349store_field (target, bitsize, bitpos, mode, exp, value_mode,
3350	     unsignedp, align, total_size)
3351     rtx target;
3352     int bitsize, bitpos;
3353     enum machine_mode mode;
3354     tree exp;
3355     enum machine_mode value_mode;
3356     int unsignedp;
3357     int align;
3358     int total_size;
3359{
3360  HOST_WIDE_INT width_mask = 0;
3361
3362  if (bitsize < HOST_BITS_PER_WIDE_INT)
3363    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3364
3365  /* If we are storing into an unaligned field of an aligned union that is
3366     in a register, we may have the mode of TARGET being an integer mode but
3367     MODE == BLKmode.  In that case, get an aligned object whose size and
3368     alignment are the same as TARGET and store TARGET into it (we can avoid
3369     the store if the field being stored is the entire width of TARGET).  Then
3370     call ourselves recursively to store the field into a BLKmode version of
3371     that object.  Finally, load from the object into TARGET.  This is not
3372     very efficient in general, but should only be slightly more expensive
3373     than the otherwise-required unaligned accesses.  Perhaps this can be
3374     cleaned up later.  */
3375
3376  if (mode == BLKmode
3377      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3378    {
3379      rtx object = assign_stack_temp (GET_MODE (target),
3380				      GET_MODE_SIZE (GET_MODE (target)), 0);
3381      rtx blk_object = copy_rtx (object);
3382
3383      MEM_IN_STRUCT_P (object) = 1;
3384      MEM_IN_STRUCT_P (blk_object) = 1;
3385      PUT_MODE (blk_object, BLKmode);
3386
3387      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3388	emit_move_insn (object, target);
3389
3390      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3391		   align, total_size);
3392
3393      /* Even though we aren't returning target, we need to
3394	 give it the updated value.  */
3395      emit_move_insn (target, object);
3396
3397      return blk_object;
3398    }
3399
3400  /* If the structure is in a register or if the component
3401     is a bit field, we cannot use addressing to access it.
3402     Use bit-field techniques or SUBREG to store in it.  */
3403
3404  if (mode == VOIDmode
3405      || (mode != BLKmode && ! direct_store[(int) mode])
3406      || GET_CODE (target) == REG
3407      || GET_CODE (target) == SUBREG
3408      /* If the field isn't aligned enough to store as an ordinary memref,
3409	 store it as a bit field.  */
3410      || (SLOW_UNALIGNED_ACCESS
3411	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3412      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3413    {
3414      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3415
3416      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3417	 MODE.  */
3418      if (mode != VOIDmode && mode != BLKmode
3419	  && mode != TYPE_MODE (TREE_TYPE (exp)))
3420	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3421
3422      /* Store the value in the bitfield.  */
3423      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3424      if (value_mode != VOIDmode)
3425	{
3426	  /* The caller wants an rtx for the value.  */
3427	  /* If possible, avoid refetching from the bitfield itself.  */
3428	  if (width_mask != 0
3429	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3430	    {
3431	      tree count;
3432	      enum machine_mode tmode;
3433
3434	      if (unsignedp)
3435		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3436	      tmode = GET_MODE (temp);
3437	      if (tmode == VOIDmode)
3438		tmode = value_mode;
3439	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3440	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3441	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3442	    }
3443	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
3444				    NULL_RTX, value_mode, 0, align,
3445				    total_size);
3446	}
3447      return const0_rtx;
3448    }
3449  else
3450    {
3451      rtx addr = XEXP (target, 0);
3452      rtx to_rtx;
3453
3454      /* If a value is wanted, it must be the lhs,
3455	 so make the address stable for multiple use.  */
3456
3457      if (value_mode != VOIDmode && GET_CODE (addr) != REG
3458	  && ! CONSTANT_ADDRESS_P (addr)
3459	  /* A frame-pointer reference is already stable.  */
3460	  && ! (GET_CODE (addr) == PLUS
3461		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
3462		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
3463		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3464	addr = copy_to_reg (addr);
3465
3466      /* Now build a reference to just the desired component.  */
3467
3468      to_rtx = change_address (target, mode,
3469			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3470      MEM_IN_STRUCT_P (to_rtx) = 1;
3471
3472      return store_expr (exp, to_rtx, value_mode != VOIDmode);
3473    }
3474}
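
/* Illustrative sketch, not part of the original source: a caller
   storing a value into a 3-bit field that starts 8 bits into TARGET
   might call store_field as below.  TARGET, EXP (the tree for the
   value being stored), ALIGN and TOTAL_SIZE are hypothetical.
   VOIDmode as MODE marks the destination as a bit-field; VOIDmode as
   VALUE_MODE says the caller does not want the stored value back.  */
#if 0
      store_field (target, 3, 8, VOIDmode, exp,
		   VOIDmode, 0, align, total_size);
#endif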
3475
3476/* Return true if any object containing the innermost array is an unaligned
3477   packed structure field.  */
3478
3479static int
3480get_inner_unaligned_p (exp)
3481     tree exp;
3482{
3483  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3484
3485  while (1)
3486    {
3487      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3488	{
3489	  if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3490	      < needed_alignment)
3491	    return 1;
3492	}
3493      else if (TREE_CODE (exp) != ARRAY_REF
3494	       && TREE_CODE (exp) != NON_LVALUE_EXPR
3495	       && ! ((TREE_CODE (exp) == NOP_EXPR
3496		      || TREE_CODE (exp) == CONVERT_EXPR)
3497		     && (TYPE_MODE (TREE_TYPE (exp))
3498			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3499	break;
3500
3501      exp = TREE_OPERAND (exp, 0);
3502    }
3503
3504  return 0;
3505}
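
/* For example (illustrative only): given the hypothetical declaration

     struct p { char c; int a[2]; } __attribute__ ((packed)) x;

   a reference `x.a[1]' walks from the ARRAY_REF up to the
   COMPONENT_REF for `a'; the packed struct gives that field only byte
   alignment, less than the alignment an int element needs, so the
   function returns 1.  */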
3506
3507/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3508   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3509   ARRAY_REFs and find the ultimate containing object, which we return.
3510
3511   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3512   bit position, and *PUNSIGNEDP to the signedness of the field.
3513   If the position of the field is variable, we store a tree
3514   giving the variable offset (in units) in *POFFSET.
3515   This offset is in addition to the bit position.
3516   If the position is not variable, we store 0 in *POFFSET.
3517
3518   If any of the extraction expressions is volatile,
3519   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
3520
3521   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
3522   is a mode that can be used to access the field.  In that case, *PBITSIZE
3523   is redundant.
3524
3525   If the field describes a variable-sized object, *PMODE is set to
3526   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
3527   this case, but the address of the object can be found.  */
3528
3529tree
3530get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3531		     punsignedp, pvolatilep)
3532     tree exp;
3533     int *pbitsize;
3534     int *pbitpos;
3535     tree *poffset;
3536     enum machine_mode *pmode;
3537     int *punsignedp;
3538     int *pvolatilep;
3539{
3540  tree orig_exp = exp;
3541  tree size_tree = 0;
3542  enum machine_mode mode = VOIDmode;
3543  tree offset = integer_zero_node;
3544
3545  if (TREE_CODE (exp) == COMPONENT_REF)
3546    {
3547      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3548      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3549	mode = DECL_MODE (TREE_OPERAND (exp, 1));
3550      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3551    }
3552  else if (TREE_CODE (exp) == BIT_FIELD_REF)
3553    {
3554      size_tree = TREE_OPERAND (exp, 1);
3555      *punsignedp = TREE_UNSIGNED (exp);
3556    }
3557  else
3558    {
3559      mode = TYPE_MODE (TREE_TYPE (exp));
3560      *pbitsize = GET_MODE_BITSIZE (mode);
3561      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3562    }
3563
3564  if (size_tree)
3565    {
3566      if (TREE_CODE (size_tree) != INTEGER_CST)
3567	mode = BLKmode, *pbitsize = -1;
3568      else
3569	*pbitsize = TREE_INT_CST_LOW (size_tree);
3570    }
3571
3572  /* Compute cumulative bit-offset for nested component-refs and array-refs,
3573     and find the ultimate containing object.  */
3574
3575  *pbitpos = 0;
3576
3577  while (1)
3578    {
3579      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3580	{
3581	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
3582		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3583		      : TREE_OPERAND (exp, 2));
3584	  tree constant = integer_zero_node, var = pos;
3585
3586	  /* If this field hasn't been filled in yet, don't go
3587	     past it.  This should only happen when folding expressions
3588	     made during type construction.  */
3589	  if (pos == 0)
3590	    break;
3591
3592	  /* Assume here that the offset is a multiple of a unit.
3593	     If not, there should be an explicitly added constant.  */
3594	  if (TREE_CODE (pos) == PLUS_EXPR
3595	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3596	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3597	  else if (TREE_CODE (pos) == INTEGER_CST)
3598	    constant = pos, var = integer_zero_node;
3599
3600	  *pbitpos += TREE_INT_CST_LOW (constant);
3601
3602	  if (var)
3603	    offset = size_binop (PLUS_EXPR, offset,
3604				 size_binop (EXACT_DIV_EXPR, var,
3605					     size_int (BITS_PER_UNIT)));
3606	}
3607
3608      else if (TREE_CODE (exp) == ARRAY_REF)
3609	{
3610	  /* This code is based on the code in case ARRAY_REF in expand_expr
3611	     below.  We assume here that the size of an array element is
3612	     always an integral multiple of BITS_PER_UNIT.  */
3613
3614	  tree index = TREE_OPERAND (exp, 1);
3615	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3616	  tree low_bound
3617	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3618	  tree index_type = TREE_TYPE (index);
3619
3620	  if (! integer_zerop (low_bound))
3621	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3622
3623	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
3624	    {
3625	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
3626			       index);
3627	      index_type = TREE_TYPE (index);
3628	    }
3629
3630	  index = fold (build (MULT_EXPR, index_type, index,
3631			       TYPE_SIZE (TREE_TYPE (exp))));
3632
3633	  if (TREE_CODE (index) == INTEGER_CST
3634	      && TREE_INT_CST_HIGH (index) == 0)
3635	    *pbitpos += TREE_INT_CST_LOW (index);
3636	  else
3637	    offset = size_binop (PLUS_EXPR, offset,
3638				 size_binop (FLOOR_DIV_EXPR, index,
3639					     size_int (BITS_PER_UNIT)));
3640	}
3641      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3642	       && ! ((TREE_CODE (exp) == NOP_EXPR
3643		      || TREE_CODE (exp) == CONVERT_EXPR)
3644		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3645			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3646			       != UNION_TYPE))
3647		     && (TYPE_MODE (TREE_TYPE (exp))
3648			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3649	break;
3650
3651      /* If any reference in the chain is volatile, the effect is volatile.  */
3652      if (TREE_THIS_VOLATILE (exp))
3653	*pvolatilep = 1;
3654      exp = TREE_OPERAND (exp, 0);
3655    }
3656
3657  /* If this was a bit-field, see if there is a mode that allows direct
3658     access in case EXP is in memory.  */
3659  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3660    {
3661      mode = mode_for_size (*pbitsize, MODE_INT, 0);
3662      if (mode == BLKmode)
3663	mode = VOIDmode;
3664    }
3665
3666  if (integer_zerop (offset))
3667    offset = 0;
3668
3669  if (offset != 0 && contains_placeholder_p (offset))
3670    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3671
3672  *pmode = mode;
3673  *poffset = offset;
3674  return exp;
3675}
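
/* Worked example (illustrative, assuming 32-bit ints): given the
   hypothetical declaration

     struct s { int a; unsigned int b : 5; } x;

   calling get_inner_reference on the COMPONENT_REF for `x.b' sets
   *PBITSIZE = 5, *PBITPOS = 32, *POFFSET = 0 and *PUNSIGNEDP = 1,
   and returns the VAR_DECL for `x'.  *PMODE stays VOIDmode, since
   32 is not a multiple of 5 and so no direct-access mode is found.  */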
3676
3677/* Given an rtx VALUE that may contain additions and multiplications,
3678   return an equivalent value that just refers to a register or memory.
3679   This is done by generating instructions to perform the arithmetic
3680   and returning a pseudo-register containing the value.
3681
3682   The returned value may be a REG, SUBREG, MEM or constant.  */
3683
3684rtx
3685force_operand (value, target)
3686     rtx value, target;
3687{
3688  register optab binoptab = 0;
3689  /* Use a temporary to force order of execution of calls to
3690     `force_operand'.  */
3691  rtx tmp;
3692  register rtx op2;
3693  /* Use subtarget as the target for operand 0 of a binary operation.  */
3694  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3695
3696  if (GET_CODE (value) == PLUS)
3697    binoptab = add_optab;
3698  else if (GET_CODE (value) == MINUS)
3699    binoptab = sub_optab;
3700  else if (GET_CODE (value) == MULT)
3701    {
3702      op2 = XEXP (value, 1);
3703      if (!CONSTANT_P (op2)
3704	  && !(GET_CODE (op2) == REG && op2 != subtarget))
3705	subtarget = 0;
3706      tmp = force_operand (XEXP (value, 0), subtarget);
3707      return expand_mult (GET_MODE (value), tmp,
3708			  force_operand (op2, NULL_RTX),
3709			  target, 0);
3710    }
3711
3712  if (binoptab)
3713    {
3714      op2 = XEXP (value, 1);
3715      if (!CONSTANT_P (op2)
3716	  && !(GET_CODE (op2) == REG && op2 != subtarget))
3717	subtarget = 0;
3718      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3719	{
3720	  binoptab = add_optab;
3721	  op2 = negate_rtx (GET_MODE (value), op2);
3722	}
3723
3724      /* Check for an addition with OP2 a constant integer and our first
3725	 operand a PLUS of a virtual register and something else.  In that
3726	 case, we want to emit the sum of the virtual register and the
3727	 constant first and then add the other value.  This allows virtual
3728	 register instantiation to simply modify the constant rather than
3729	 creating another one around this addition.  */
3730      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3731	  && GET_CODE (XEXP (value, 0)) == PLUS
3732	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3733	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3734	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3735	{
3736	  rtx temp = expand_binop (GET_MODE (value), binoptab,
3737				   XEXP (XEXP (value, 0), 0), op2,
3738				   subtarget, 0, OPTAB_LIB_WIDEN);
3739	  return expand_binop (GET_MODE (value), binoptab, temp,
3740			       force_operand (XEXP (XEXP (value, 0), 1), 0),
3741			       target, 0, OPTAB_LIB_WIDEN);
3742	}
3743
3744      tmp = force_operand (XEXP (value, 0), subtarget);
3745      /* We give UNSIGNEDP = 0 to expand_binop because the only
3746	 operations we are expanding here are signed ones.  */
3747      return expand_binop (GET_MODE (value), binoptab, tmp,
3748			   force_operand (op2, NULL_RTX),
3749			   target, 0, OPTAB_LIB_WIDEN);
3750    }
3751  return value;
3752}
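
/* Illustrative sketch, not part of the original source: legitimizing
   an address of the form (plus (mult REG 4) REG), as scaled array
   indexing might produce.  The function name and arguments below are
   hypothetical.  */
#if 0
static rtx
example_force_address (base, index)
     rtx base, index;
{
  rtx sum = gen_rtx (PLUS, Pmode,
		     gen_rtx (MULT, Pmode, index, GEN_INT (4)), base);

  /* Emits a multiply and an add on machines whose addresses cannot
     express this directly, and returns a pseudo holding the value.  */
  return force_operand (sum, NULL_RTX);
}
#endif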
3753
3754/* Subroutine of expand_expr:
3755   save the non-copied parts (LIST) of an expr (LHS), and return a list
3756   which can restore these values to their previous values,
3757   should something modify their storage.  */
3758
3759static tree
3760save_noncopied_parts (lhs, list)
3761     tree lhs;
3762     tree list;
3763{
3764  tree tail;
3765  tree parts = 0;
3766
3767  for (tail = list; tail; tail = TREE_CHAIN (tail))
3768    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3769      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3770    else
3771      {
3772	tree part = TREE_VALUE (tail);
3773	tree part_type = TREE_TYPE (part);
3774	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3775	rtx target = assign_stack_temp (TYPE_MODE (part_type),
3776					int_size_in_bytes (part_type), 0);
3777	MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3778	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3779	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3780	parts = tree_cons (to_be_saved,
3781			   build (RTL_EXPR, part_type, NULL_TREE,
3782				  (tree) target),
3783			   parts);
3784	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3785      }
3786  return parts;
3787}
3788
3789/* Subroutine of expand_expr:
3790   record the non-copied parts (LIST) of an expr (LHS), and return a list
3791   which specifies the initial values of these parts.  */
3792
3793static tree
3794init_noncopied_parts (lhs, list)
3795     tree lhs;
3796     tree list;
3797{
3798  tree tail;
3799  tree parts = 0;
3800
3801  for (tail = list; tail; tail = TREE_CHAIN (tail))
3802    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3803      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3804    else
3805      {
3806	tree part = TREE_VALUE (tail);
3807	tree part_type = TREE_TYPE (part);
3808	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3809	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3810      }
3811  return parts;
3812}
3813
3814/* Subroutine of expand_expr: return nonzero iff there is no way that
3815   EXP can reference X, which is being modified.  */
3816
3817static int
3818safe_from_p (x, exp)
3819     rtx x;
3820     tree exp;
3821{
3822  rtx exp_rtl = 0;
3823  int i, nops;
3824
3825  if (x == 0
3826      /* If EXP has varying size, we MUST use a target since we currently
3827	 have no way of allocating temporaries of variable size.  So we
3828	 assume here that something at a higher level has prevented a
3829	 clash.  This is somewhat bogus, but the best we can do.  Only
3830	 do this when X is BLKmode.  */
3831      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3832	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
3833	  && GET_MODE (x) == BLKmode))
3834    return 1;
3835
3836  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3837     find the underlying pseudo.  */
3838  if (GET_CODE (x) == SUBREG)
3839    {
3840      x = SUBREG_REG (x);
3841      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3842	return 0;
3843    }
3844
3845  /* If X is a location in the outgoing argument area, it is always safe.  */
3846  if (GET_CODE (x) == MEM
3847      && (XEXP (x, 0) == virtual_outgoing_args_rtx
3848	  || (GET_CODE (XEXP (x, 0)) == PLUS
3849	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3850    return 1;
3851
3852  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3853    {
3854    case 'd':
3855      exp_rtl = DECL_RTL (exp);
3856      break;
3857
3858    case 'c':
3859      return 1;
3860
3861    case 'x':
3862      if (TREE_CODE (exp) == TREE_LIST)
3863	return ((TREE_VALUE (exp) == 0
3864		 || safe_from_p (x, TREE_VALUE (exp)))
3865		&& (TREE_CHAIN (exp) == 0
3866		    || safe_from_p (x, TREE_CHAIN (exp))));
3867      else
3868	return 0;
3869
3870    case '1':
3871      return safe_from_p (x, TREE_OPERAND (exp, 0));
3872
3873    case '2':
3874    case '<':
3875      return (safe_from_p (x, TREE_OPERAND (exp, 0))
3876	      && safe_from_p (x, TREE_OPERAND (exp, 1)));
3877
3878    case 'e':
3879    case 'r':
3880      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
3881	 the expression.  If it is set, we conflict iff we are that rtx or
3882	 both are in memory.  Otherwise, we check all operands of the
3883	 expression recursively.  */
3884
3885      switch (TREE_CODE (exp))
3886	{
3887	case ADDR_EXPR:
3888	  return (staticp (TREE_OPERAND (exp, 0))
3889		  || safe_from_p (x, TREE_OPERAND (exp, 0)));
3890
3891	case INDIRECT_REF:
3892	  if (GET_CODE (x) == MEM)
3893	    return 0;
3894	  break;
3895
3896	case CALL_EXPR:
3897	  exp_rtl = CALL_EXPR_RTL (exp);
3898	  if (exp_rtl == 0)
3899	    {
3900	      /* Assume that the call will clobber all hard registers and
3901		 all of memory.  */
3902	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3903		  || GET_CODE (x) == MEM)
3904		return 0;
3905	    }
3906
3907	  break;
3908
3909	case RTL_EXPR:
3910	  /* If a sequence exists, we would have to scan every instruction
3911	     in the sequence to see if it was safe.  This is probably not
3912	     worthwhile.  */
3913	  if (RTL_EXPR_SEQUENCE (exp))
3914	    return 0;
3915
3916	  exp_rtl = RTL_EXPR_RTL (exp);
3917	  break;
3918
3919	case WITH_CLEANUP_EXPR:
3920	  exp_rtl = RTL_EXPR_RTL (exp);
3921	  break;
3922
3923	case CLEANUP_POINT_EXPR:
3924	  return safe_from_p (x, TREE_OPERAND (exp, 0));
3925
3926	case SAVE_EXPR:
3927	  exp_rtl = SAVE_EXPR_RTL (exp);
3928	  break;
3929
3930	case BIND_EXPR:
3931	  /* The only operand we look at is operand 1.  The rest aren't
3932	     part of the expression.  */
3933	  return safe_from_p (x, TREE_OPERAND (exp, 1));
3934
3935	case METHOD_CALL_EXPR:
3936	  /* This takes an rtx argument, but shouldn't appear here.  */
3937	  abort ();
3938	}
3939
3940      /* If we have an rtx, we do not need to scan our operands.  */
3941      if (exp_rtl)
3942	break;
3943
3944      nops = tree_code_length[(int) TREE_CODE (exp)];
3945      for (i = 0; i < nops; i++)
3946	if (TREE_OPERAND (exp, i) != 0
3947	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3948	  return 0;
3949    }
3950
3951  /* If we have an rtl, find any enclosed object.  Then see if we conflict
3952     with it.  */
3953  if (exp_rtl)
3954    {
3955      if (GET_CODE (exp_rtl) == SUBREG)
3956	{
3957	  exp_rtl = SUBREG_REG (exp_rtl);
3958	  if (GET_CODE (exp_rtl) == REG
3959	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3960	    return 0;
3961	}
3962
3963      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
3964	 are memory and EXP is not readonly.  */
3965      return ! (rtx_equal_p (x, exp_rtl)
3966		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3967		    && ! TREE_READONLY (exp)));
3968    }
3969
3970  /* If we reach here, it is safe.  */
3971  return 1;
3972}
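
/* For instance (illustrative only): if X is a MEM, safe_from_p
   returns 0 for an EXP containing an INDIRECT_REF, or a CALL_EXPR
   whose rtl is not yet known, since either might refer to the same
   memory; for a constant EXP it always returns 1.  */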
3973
3974/* Subroutine of expand_expr: return nonzero iff EXP is an
3975   expression whose type is statically determinable.  */
3976
3977static int
3978fixed_type_p (exp)
3979     tree exp;
3980{
3981  if (TREE_CODE (exp) == PARM_DECL
3982      || TREE_CODE (exp) == VAR_DECL
3983      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3984      || TREE_CODE (exp) == COMPONENT_REF
3985      || TREE_CODE (exp) == ARRAY_REF)
3986    return 1;
3987  return 0;
3988}
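
/* For instance (illustrative only): a VAR_DECL always has the type it
   was declared with, so it is "fixed", while a plain INDIRECT_REF is
   not listed, since `*p' may really refer to an object of a type
   derived from the pointed-to type.  */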
3989
3990/* expand_expr: generate code for computing expression EXP.
3991   An rtx for the computed value is returned.  The value is never null.
3992   In the case of a void EXP, const0_rtx is returned.
3993
3994   The value may be stored in TARGET if TARGET is nonzero.
3995   TARGET is just a suggestion; callers must assume that
3996   the rtx returned may not be the same as TARGET.
3997
3998   If TARGET is CONST0_RTX, it means that the value will be ignored.
3999
4000   If TMODE is not VOIDmode, it suggests generating the
4001   result in mode TMODE.  But this is done only when convenient.
4002   Otherwise, TMODE is ignored and the value is generated in its natural mode.
4003   TMODE is just a suggestion; callers must assume that
4004   the rtx returned may not have mode TMODE.
4005
4006   Note that TARGET may have neither TMODE nor MODE.  In that case, it
4007   probably will not be used.
4008
4009   If MODIFIER is EXPAND_SUM then when EXP is an addition
4010   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4011   or a nest of (PLUS ...) and (MINUS ...) where the terms are
4012   products as above, or REG or MEM, or constant.
4013   Ordinarily in such cases we would output mul or add instructions
4014   and then return a pseudo reg containing the sum.
4015
4016   EXPAND_INITIALIZER is much like EXPAND_SUM except that
4017   it also marks a label as absolutely required (it can't be dead).
4018   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4019   This is used for outputting expressions used in initializers.
4020
4021   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4022   with a constant address even if that address is not normally legitimate.
4023   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
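
/* Illustrative sketch, not part of the original source: two common
   ways statement-expansion code calls this function.  EXP here is a
   hypothetical expression tree.  */
#if 0
      /* Compute EXP, letting expand_expr pick the place and mode.  */
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* Expand EXP only for its side effects; the value is ignored.  */
      expand_expr (exp, const0_rtx, VOIDmode, 0);
#endif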
4024
4025rtx
4026expand_expr (exp, target, tmode, modifier)
4027     register tree exp;
4028     rtx target;
4029     enum machine_mode tmode;
4030     enum expand_modifier modifier;
4031{
4032  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4033     This is static so it will be accessible to our recursive callees.  */
4034  static tree placeholder_list = 0;
4035  register rtx op0, op1, temp;
4036  tree type = TREE_TYPE (exp);
4037  int unsignedp = TREE_UNSIGNED (type);
4038  register enum machine_mode mode = TYPE_MODE (type);
4039  register enum tree_code code = TREE_CODE (exp);
4040  optab this_optab;
4041  /* Use subtarget as the target for operand 0 of a binary operation.  */
4042  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4043  rtx original_target = target;
4044  /* Maybe defer this until sure not doing bytecode?  */
4045  int ignore = (target == const0_rtx
4046		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4047		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
4048		     || code == COND_EXPR)
4049		    && TREE_CODE (type) == VOID_TYPE));
4050  tree context;
4051
4052
4053  if (output_bytecode && modifier != EXPAND_INITIALIZER)
4054    {
4055      bc_expand_expr (exp);
4056      return NULL;
4057    }
4058
4059  /* Don't use hard regs as subtargets, because the combiner
4060     can only handle pseudo regs.  */
4061  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4062    subtarget = 0;
4063  /* Avoid subtargets inside loops,
4064     since they hide some invariant expressions.  */
4065  if (preserve_subexpressions_p ())
4066    subtarget = 0;
4067
4068  /* If we are going to ignore this result, we need only do something
4069     if there is a side-effect somewhere in the expression.  If there
4070     is, short-circuit the most common cases here.  Note that we must
4071     not call expand_expr with anything but const0_rtx in case this
4072     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
4073
4074  if (ignore)
4075    {
4076      if (! TREE_SIDE_EFFECTS (exp))
4077	return const0_rtx;
4078
4079      /* Ensure we reference a volatile object even if value is ignored.  */
4080      if (TREE_THIS_VOLATILE (exp)
4081	  && TREE_CODE (exp) != FUNCTION_DECL
4082	  && mode != VOIDmode && mode != BLKmode)
4083	{
4084	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4085	  if (GET_CODE (temp) == MEM)
4086	    temp = copy_to_reg (temp);
4087	  return const0_rtx;
4088	}
4089
4090      if (TREE_CODE_CLASS (code) == '1')
4091	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4092			    VOIDmode, modifier);
4093      else if (TREE_CODE_CLASS (code) == '2'
4094	       || TREE_CODE_CLASS (code) == '<')
4095	{
4096	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4097	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4098	  return const0_rtx;
4099	}
4100      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4101	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4102	/* If the second operand has no side effects, just evaluate
4103	   the first. */
4104	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4105			    VOIDmode, modifier);
4106
4107      target = 0;
4108    }
4109
4110  /* If will do cse, generate all results into pseudo registers
4111     since 1) that allows cse to find more things
4112     and 2) otherwise cse could produce an insn the machine
4113     cannot support.  */
4114
4115  if (! cse_not_expected && mode != BLKmode && target
4116      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4117    target = subtarget;
4118
4119  switch (code)
4120    {
4121    case LABEL_DECL:
4122      {
4123	tree function = decl_function_context (exp);
4124	/* Handle using a label in a containing function.  */
4125	if (function != current_function_decl && function != 0)
4126	  {
4127	    struct function *p = find_function_data (function);
4128	    /* Allocate in the memory associated with the function
4129	       that the label is in.  */
4130	    push_obstacks (p->function_obstack,
4131			   p->function_maybepermanent_obstack);
4132
4133	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4134					label_rtx (exp), p->forced_labels);
4135	    pop_obstacks ();
4136	  }
4137	else if (modifier == EXPAND_INITIALIZER)
4138	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4139				   label_rtx (exp), forced_labels);
4140	temp = gen_rtx (MEM, FUNCTION_MODE,
4141			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4142	if (function != current_function_decl && function != 0)
4143	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4144	return temp;
4145      }
4146
4147    case PARM_DECL:
4148      if (DECL_RTL (exp) == 0)
4149	{
4150	  error_with_decl (exp, "prior parameter's size depends on `%s'");
4151	  return CONST0_RTX (mode);
4152	}
4153
4154      /* ... fall through ... */
4155
4156    case VAR_DECL:
4157      /* If a static var's type was incomplete when the decl was written,
4158	 but the type is complete now, lay out the decl now.  */
4159      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4160	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4161	{
4162	  push_obstacks_nochange ();
4163	  end_temporary_allocation ();
4164	  layout_decl (exp, 0);
4165	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4166	  pop_obstacks ();
4167	}
4168
4169      /* ... fall through ... */
4170
4171    case FUNCTION_DECL:
4172    case RESULT_DECL:
4173      if (DECL_RTL (exp) == 0)
4174	abort ();
4175
4176      /* Ensure the variable is marked as used even if it doesn't go
4177	 through a parser.  If it hasn't been used yet, write out an external
4178	 definition.  */
4179      if (! TREE_USED (exp))
4180	{
4181	  assemble_external (exp);
4182	  TREE_USED (exp) = 1;
4183	}
4184
4185      /* Handle variables inherited from containing functions.  */
4186      context = decl_function_context (exp);
4187
4188      /* We treat inline_function_decl as an alias for the current function
4189	 because that is the inline function whose vars, types, etc.
4190	 are being merged into the current function.
4191	 See expand_inline_function.  */
4192
4193      if (context != 0 && context != current_function_decl
4194	  && context != inline_function_decl
4195	  /* If var is static, we don't need a static chain to access it.  */
4196	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
4197		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4198	{
4199	  rtx addr;
4200
4201	  /* Mark as non-local and addressable.  */
4202	  DECL_NONLOCAL (exp) = 1;
4203	  mark_addressable (exp);
4204	  if (GET_CODE (DECL_RTL (exp)) != MEM)
4205	    abort ();
4206	  addr = XEXP (DECL_RTL (exp), 0);
4207	  if (GET_CODE (addr) == MEM)
4208	    addr = gen_rtx (MEM, Pmode,
4209			    fix_lexical_addr (XEXP (addr, 0), exp));
4210	  else
4211	    addr = fix_lexical_addr (addr, exp);
4212	  return change_address (DECL_RTL (exp), mode, addr);
4213	}
4214
4215      /* This is the case of an array whose size is to be determined
4216	 from its initializer, while the initializer is still being parsed.
4217	 See expand_decl.  */
4218
4219      if (GET_CODE (DECL_RTL (exp)) == MEM
4220	  && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4221	return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4222			       XEXP (DECL_RTL (exp), 0));
4223
4224      /* If DECL_RTL is memory, we are in the normal case and either
4225	 the address is not valid or it is not a register and -fforce-addr
4226	 is specified, get the address into a register.  */
4227
4228      if (GET_CODE (DECL_RTL (exp)) == MEM
4229	  && modifier != EXPAND_CONST_ADDRESS
4230	  && modifier != EXPAND_SUM
4231	  && modifier != EXPAND_INITIALIZER
4232	  && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4233	      || (flag_force_addr
4234		  && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4235	return change_address (DECL_RTL (exp), VOIDmode,
4236			       copy_rtx (XEXP (DECL_RTL (exp), 0)));
4237
4238      /* If the mode of DECL_RTL does not match that of the decl, it
4239	 must be a promoted value.  We return a SUBREG of the wanted mode,
4240	 but mark it so that we know that it was already extended.  */
4241
4242      if (GET_CODE (DECL_RTL (exp)) == REG
4243	  && GET_MODE (DECL_RTL (exp)) != mode)
4244	{
4245	  /* Get the signedness used for this variable.  Ensure we get the
4246	     same mode we got when the variable was declared.  */
4247	  if (GET_MODE (DECL_RTL (exp))
4248	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4249	    abort ();
4250
4251	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4252	  SUBREG_PROMOTED_VAR_P (temp) = 1;
4253	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4254	  return temp;
4255	}
4256
4257      return DECL_RTL (exp);
4258
4259    case INTEGER_CST:
4260      return immed_double_const (TREE_INT_CST_LOW (exp),
4261				 TREE_INT_CST_HIGH (exp),
4262				 mode);
4263
4264    case CONST_DECL:
4265      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4266
4267    case REAL_CST:
4268      /* If optimized, generate immediate CONST_DOUBLE
4269	 which will be turned into memory by reload if necessary.
4270
4271	 We used to force a register so that loop.c could see it.  But
4272	 this does not allow gen_* patterns to perform optimizations with
4273	 the constants.  It also produces two insns in cases like "x = 1.0;".
4274	 On most machines, floating-point constants are not permitted in
4275	 many insns, so we'd end up copying it to a register in any case.
4276
4277	 Now, we do the copying in expand_binop, if appropriate.  */
4278      return immed_real_const (exp);
4279
4280    case COMPLEX_CST:
4281    case STRING_CST:
4282      if (! TREE_CST_RTL (exp))
4283	output_constant_def (exp);
4284
4285      /* TREE_CST_RTL probably contains a constant address.
4286	 On RISC machines where a constant address isn't valid,
4287	 make some insns to get that address into a register.  */
4288      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4289	  && modifier != EXPAND_CONST_ADDRESS
4290	  && modifier != EXPAND_INITIALIZER
4291	  && modifier != EXPAND_SUM
4292	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4293	      || (flag_force_addr
4294		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4295	return change_address (TREE_CST_RTL (exp), VOIDmode,
4296			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4297      return TREE_CST_RTL (exp);
4298
4299    case SAVE_EXPR:
4300      context = decl_function_context (exp);
4301
4302      /* We treat inline_function_decl as an alias for the current function
4303	 because that is the inline function whose vars, types, etc.
4304	 are being merged into the current function.
4305	 See expand_inline_function.  */
4306      if (context == current_function_decl || context == inline_function_decl)
4307	context = 0;
4308
4309      /* If this is non-local, handle it.  */
4310      if (context)
4311	{
4312	  temp = SAVE_EXPR_RTL (exp);
4313	  if (temp && GET_CODE (temp) == REG)
4314	    {
4315	      put_var_into_stack (exp);
4316	      temp = SAVE_EXPR_RTL (exp);
4317	    }
4318	  if (temp == 0 || GET_CODE (temp) != MEM)
4319	    abort ();
4320	  return change_address (temp, mode,
4321				 fix_lexical_addr (XEXP (temp, 0), exp));
4322	}
4323      if (SAVE_EXPR_RTL (exp) == 0)
4324	{
4325	  if (mode == BLKmode)
4326	    {
4327	      temp
4328		= assign_stack_temp (mode, int_size_in_bytes (type), 0);
4329	      MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4330	    }
4331	  else if (mode == VOIDmode)
4332	    temp = const0_rtx;
4333	  else
4334	    temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4335
4336	  SAVE_EXPR_RTL (exp) = temp;
4337	  if (!optimize && GET_CODE (temp) == REG)
4338	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4339				      save_expr_regs);
4340
4341	  /* If the mode of TEMP does not match that of the expression, it
4342	     must be a promoted value.  We pass store_expr a SUBREG of the
4343	     wanted mode but mark it so that we know that it was already
4344	     extended.  Note that `unsignedp' was modified above in
4345	     this case.  */
4346
4347	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4348	    {
4349	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4350	      SUBREG_PROMOTED_VAR_P (temp) = 1;
4351	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4352	    }
4353
4354	  if (temp == const0_rtx)
4355	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4356	  else
4357	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
4358	}
4359
4360      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4361	 must be a promoted value.  We return a SUBREG of the wanted mode,
4362	 but mark it so that we know that it was already extended. */
4363
4364      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4365	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4366	{
4367	  /* Compute the signedness and make the proper SUBREG.  */
4368	  promote_mode (type, mode, &unsignedp, 0);
4369	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4370	  SUBREG_PROMOTED_VAR_P (temp) = 1;
4371	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4372	  return temp;
4373	}
4374
4375      return SAVE_EXPR_RTL (exp);
4376
4377    case PLACEHOLDER_EXPR:
4378      /* If there is an object at the head of the placeholder list,
4379	 see if some object in its references is of type TYPE.  For
4380	 further information, see tree.def.  */
4381      if (placeholder_list)
4382	{
4383	  tree object;
4384	  tree old_list = placeholder_list;
4385
4386	  for (object = TREE_PURPOSE (placeholder_list);
4387	       (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4388		!= TYPE_MAIN_VARIANT (type))
4389	       && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4390		   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4391		   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4392		   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4393	       object = TREE_OPERAND (object, 0))
4394	    ;
4395
4396	  if (object != 0
4397	      && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4398		  == TYPE_MAIN_VARIANT (type)))
4399	    {
4400	      /* Expand this object skipping the list entries before
4401		 it was found in case it is also a PLACEHOLDER_EXPR.
4402		 In that case, we want to translate it using subsequent
4403		 entries.  */
4404	      placeholder_list = TREE_CHAIN (placeholder_list);
4405	      temp = expand_expr (object, original_target, tmode, modifier);
4406	      placeholder_list = old_list;
4407	      return temp;
4408	    }
4409	}
4410
4411      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
4412      abort ();
4413
4414    case WITH_RECORD_EXPR:
4415      /* Put the object on the placeholder list, expand our first operand,
4416	 and pop the list.  */
4417      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4418				    placeholder_list);
4419      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4420			    tmode, modifier);
4421      placeholder_list = TREE_CHAIN (placeholder_list);
4422      return target;
4423
4424    case EXIT_EXPR:
4425      expand_exit_loop_if_false (NULL_PTR,
4426				 invert_truthvalue (TREE_OPERAND (exp, 0)));
4427      return const0_rtx;
4428
4429    case LOOP_EXPR:
4430      push_temp_slots ();
4431      expand_start_loop (1);
4432      expand_expr_stmt (TREE_OPERAND (exp, 0));
4433      expand_end_loop ();
4434      pop_temp_slots ();
4435
4436      return const0_rtx;
4437
4438    case BIND_EXPR:
4439      {
4440	tree vars = TREE_OPERAND (exp, 0);
4441	int vars_need_expansion = 0;
4442
4443	/* Need to open a binding contour here because
4444	   if there are any cleanups, they must be contained here.  */
4445	expand_start_bindings (0);
4446
4447	/* Mark the corresponding BLOCK for output in its proper place.  */
4448	if (TREE_OPERAND (exp, 2) != 0
4449	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
4450	  insert_block (TREE_OPERAND (exp, 2));
4451
4452	/* If VARS have not yet been expanded, expand them now.  */
4453	while (vars)
4454	  {
4455	    if (DECL_RTL (vars) == 0)
4456	      {
4457		vars_need_expansion = 1;
4458		expand_decl (vars);
4459	      }
4460	    expand_decl_init (vars);
4461	    vars = TREE_CHAIN (vars);
4462	  }
4463
4464	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4465
4466	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4467
4468	return temp;
4469      }
4470
4471    case RTL_EXPR:
4472      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4473	abort ();
4474      emit_insns (RTL_EXPR_SEQUENCE (exp));
4475      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4476      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4477      free_temps_for_rtl_expr (exp);
4478      return RTL_EXPR_RTL (exp);
4479
4480    case CONSTRUCTOR:
4481      /* If we don't need the result, just ensure we evaluate any
4482	 subexpressions.  */
4483      if (ignore)
4484	{
4485	  tree elt;
4486	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4487	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4488	  return const0_rtx;
4489	}
4490
4491      /* All elts simple constants => refer to a constant in memory.  But
4492	 if this is a non-BLKmode mode, let it store a field at a time
4493	 since that should make a CONST_INT or CONST_DOUBLE when we
4494	 fold.  Likewise, if we have a target we can use, it is best to
4495	 store directly into the target unless the type is large enough
4496	 that memcpy will be used.  If we are making an initializer and
4497	 all operands are constant, put it in memory as well.  */
4498      else if ((TREE_STATIC (exp)
4499		&& ((mode == BLKmode
4500		     && ! (target != 0 && safe_from_p (target, exp)))
4501		    || TREE_ADDRESSABLE (exp)
4502		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4503			&& (move_by_pieces_ninsns
4504			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4505			     TYPE_ALIGN (type) / BITS_PER_UNIT)
4506			    > MOVE_RATIO))))
4507	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4508	{
4509	  rtx constructor = output_constant_def (exp);
4510	  if (modifier != EXPAND_CONST_ADDRESS
4511	      && modifier != EXPAND_INITIALIZER
4512	      && modifier != EXPAND_SUM
4513	      && (! memory_address_p (GET_MODE (constructor),
4514				      XEXP (constructor, 0))
4515		  || (flag_force_addr
4516		      && GET_CODE (XEXP (constructor, 0)) != REG)))
4517	    constructor = change_address (constructor, VOIDmode,
4518					  XEXP (constructor, 0));
4519	  return constructor;
4520	}
4521
4522      else
4523	{
4524	  if (target == 0 || ! safe_from_p (target, exp))
4525	    {
4526	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4527		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4528	      else
4529		{
4530		  target
4531		    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4532		  if (AGGREGATE_TYPE_P (type))
4533		    MEM_IN_STRUCT_P (target) = 1;
4534		}
4535	    }
4536	  store_constructor (exp, target);
4537	  return target;
4538	}
4539
4540    case INDIRECT_REF:
4541      {
4542	tree exp1 = TREE_OPERAND (exp, 0);
4543	tree exp2;
4544
4545	/* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4546	   for  *PTR += ANYTHING  where PTR is put inside the SAVE_EXPR.
4547	   This code has the same general effect as simply doing
4548	   expand_expr on the save expr, except that the expression PTR
4549	   is computed for use as a memory address.  This means different
4550	   code, suitable for indexing, may be generated.  */
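	/* For instance (illustrative): for `*p += 1', P is wrapped in a
	   SAVE_EXPR shared by the load and the store, and the first
	   expansion of that SAVE_EXPR goes through this path.  */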
4551	if (TREE_CODE (exp1) == SAVE_EXPR
4552	    && SAVE_EXPR_RTL (exp1) == 0
4553	    && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4554	  {
4555	    temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4556				VOIDmode, EXPAND_SUM);
4557	    op0 = memory_address (mode, temp);
4558	    op0 = copy_all_regs (op0);
4559	    SAVE_EXPR_RTL (exp1) = op0;
4560	  }
4561	else
4562	  {
4563	    op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4564	    op0 = memory_address (mode, op0);
4565	  }
4566
4567	temp = gen_rtx (MEM, mode, op0);
4568	/* If address was computed by addition,
4569	   mark this as an element of an aggregate.  */
4570	if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4571	    || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4572		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4573	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4574	    || (TREE_CODE (exp1) == ADDR_EXPR
4575		&& (exp2 = TREE_OPERAND (exp1, 0))
4576		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4577	  MEM_IN_STRUCT_P (temp) = 1;
4578	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4579
4580	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
4581	   here, because, in C and C++, the fact that a location is accessed
4582	   through a pointer to const does not mean that the value there can
4583	   never change.  Languages where it can never change should
4584	   also set TREE_STATIC.  */
4585	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) | TREE_STATIC (exp);
4586	return temp;
4587      }
4588
4589    case ARRAY_REF:
4590      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4591	abort ();
4592
4593      {
4594	tree array = TREE_OPERAND (exp, 0);
4595	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4596	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4597	tree index = TREE_OPERAND (exp, 1);
4598	tree index_type = TREE_TYPE (index);
4599	int i;
4600
4601	if (TREE_CODE (low_bound) != INTEGER_CST
4602	    && contains_placeholder_p (low_bound))
4603	  low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4604
4605	/* Optimize the special-case of a zero lower bound.
4606
4607	   We convert the low_bound to sizetype to avoid some problems
4608	   with constant folding.  (E.g. suppose the lower bound is 1,
4609	   and its mode is QI.  Without the conversion,  (ARRAY
4610	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4611	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)
4612
4613	   But sizetype isn't quite right either (especially if
4614	   the low bound is negative).  FIXME */
4615
4616	if (! integer_zerop (low_bound))
4617	  index = fold (build (MINUS_EXPR, index_type, index,
4618			       convert (sizetype, low_bound)));
4619
4620	if ((TREE_CODE (index) != INTEGER_CST
4621	     || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4622	    && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4623	  {
4624	    /* Nonconstant array index or nonconstant element size, and
4625	       not an array in an unaligned (packed) structure field.
4626	       Generate the tree for *(&array+index) and expand that,
4627	       except do it in a language-independent way
4628	       and don't complain about non-lvalue arrays.
4629	       `mark_addressable' should already have been called
4630	       for any array for which this case will be reached.  */
4631
4632	    /* Don't forget the const or volatile flag from the array
4633	       element. */
4634	    tree variant_type = build_type_variant (type,
4635						    TREE_READONLY (exp),
4636						    TREE_THIS_VOLATILE (exp));
4637	    tree array_adr = build1 (ADDR_EXPR,
4638				     build_pointer_type (variant_type), array);
4639	    tree elt;
4640	    tree size = size_in_bytes (type);
4641
4642	    /* Convert the integer argument to a type the same size as sizetype
4643	       so the multiply won't overflow spuriously.  */
4644	    if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4645	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4646			       index);
4647
4648	    if (TREE_CODE (size) != INTEGER_CST
4649		&& contains_placeholder_p (size))
4650	      size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4651
4652	    /* Don't think the address has side effects
4653	       just because the array does.
4654	       (In some cases the address might have side effects,
4655	       and we fail to record that fact here.  However, it should not
4656	       matter, since expand_expr should not care.)  */
4657	    TREE_SIDE_EFFECTS (array_adr) = 0;
4658
4659	    elt
4660	      = build1
4661		(INDIRECT_REF, type,
4662		 fold (build (PLUS_EXPR,
4663			      TYPE_POINTER_TO (variant_type),
4664			      array_adr,
4665			      fold
4666			      (build1
4667			       (NOP_EXPR,
4668				TYPE_POINTER_TO (variant_type),
4669				fold (build (MULT_EXPR, TREE_TYPE (index),
4670					     index,
4671					     convert (TREE_TYPE (index),
4672					     size))))))));
4673
4674	    /* Volatility, etc., of new expression is same as old
4675	       expression.  */
4676	    TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4677	    TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4678	    TREE_READONLY (elt) = TREE_READONLY (exp);
4679
4680	    return expand_expr (elt, target, tmode, modifier);
4681	  }
4682
4683	/* Fold an expression like: "foo"[2].
4684	   This is not done in fold so it won't happen inside &.
4685	   Don't fold if this is for wide characters since it's too
4686	   difficult to do correctly and this is a very rare case.  */
4687
4688	if (TREE_CODE (array) == STRING_CST
4689	    && TREE_CODE (index) == INTEGER_CST
4690	    && !TREE_INT_CST_HIGH (index)
4691	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4692	    && GET_MODE_CLASS (mode) == MODE_INT
4693	    && GET_MODE_SIZE (mode) == 1)
4694	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
4695
4696	/* If this is a constant index into a constant array,
4697	   just get the value from the array.  Handle both the cases when
4698	   we have an explicit constructor and when our operand is a variable
4699	   that was declared const.  */
4700
4701	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4702	  {
4703	    if (TREE_CODE (index) == INTEGER_CST
4704		&& TREE_INT_CST_HIGH (index) == 0)
4705	      {
4706		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4707
4708		i = TREE_INT_CST_LOW (index);
4709		while (elem && i--)
4710		  elem = TREE_CHAIN (elem);
4711		if (elem)
4712		  return expand_expr (fold (TREE_VALUE (elem)), target,
4713				      tmode, modifier);
4714	      }
4715	  }
4716
4717	else if (optimize >= 1
4718		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4719		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4720		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4721	  {
4722	    if (TREE_CODE (index) == INTEGER_CST
4723		&& TREE_INT_CST_HIGH (index) == 0)
4724	      {
4725		tree init = DECL_INITIAL (array);
4726
4727		i = TREE_INT_CST_LOW (index);
4728		if (TREE_CODE (init) == CONSTRUCTOR)
4729		  {
4730		    tree elem = CONSTRUCTOR_ELTS (init);
4731
4732		    while (elem
4733			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4734		      elem = TREE_CHAIN (elem);
4735		    if (elem)
4736		      return expand_expr (fold (TREE_VALUE (elem)), target,
4737					  tmode, modifier);
4738		  }
4739		else if (TREE_CODE (init) == STRING_CST
4740			 && i < TREE_STRING_LENGTH (init))
4741		  return GEN_INT (TREE_STRING_POINTER (init)[i]);
4742	      }
4743	  }
4744      }
4745
4746      /* Treat array-ref with constant index as a component-ref.  */
4747
4748    case COMPONENT_REF:
4749    case BIT_FIELD_REF:
4750      /* If the operand is a CONSTRUCTOR, we can just extract the
4751	 appropriate field if it is present.  Don't do this if we have
4752	 already written the data since we want to refer to that copy
4753	 and varasm.c assumes that's what we'll do.  */
4754      if (code != ARRAY_REF
4755	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4756	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4757	{
4758	  tree elt;
4759
4760	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4761	       elt = TREE_CHAIN (elt))
4762	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4763	      return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4764	}
4765
4766      {
4767	enum machine_mode mode1;
4768	int bitsize;
4769	int bitpos;
4770	tree offset;
4771	int volatilep = 0;
4772	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4773					&mode1, &unsignedp, &volatilep);
4774	int alignment;
4775
4776	/* If we got back the original object, something is wrong.  Perhaps
4777	   we are evaluating an expression too early.  In any event, don't
4778	   infinitely recurse.  */
4779	if (tem == exp)
4780	  abort ();
4781
4782	/* In some cases, we will be offsetting OP0's address by a constant.
4783	   So get it as a sum, if possible.  If we will be using it
4784	   directly in an insn, we validate it.
4785
4786	   If TEM's type is a union of variable size, pass TARGET to the inner
4787	   computation, since it will need a temporary and TARGET is known
4788	   to suffice.  This occurs in unchecked conversion in Ada.  */
4789
4790	op0 = expand_expr (tem,
4791			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
4792			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
4793				!= INTEGER_CST)
4794			    ? target : NULL_RTX),
4795			   VOIDmode, EXPAND_SUM);
4796
4797	/* If this is a constant, put it into a register if it is a
4798	   legitimate constant and memory if it isn't.  */
4799	if (CONSTANT_P (op0))
4800	  {
4801	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4802	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4803	      op0 = force_reg (mode, op0);
4804	    else
4805	      op0 = validize_mem (force_const_mem (mode, op0));
4806	  }
4807
4808	alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4809	if (offset != 0)
4810	  {
4811	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4812
4813	    if (GET_CODE (op0) != MEM)
4814	      abort ();
4815	    op0 = change_address (op0, VOIDmode,
4816				  gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4817					   force_reg (ptr_mode, offset_rtx)));
4818	    /* If we have a variable offset, the known alignment
4819	       is only that of the innermost structure containing the field.
4820	       (Actually, we could sometimes do better by using the
4821	       size of an element of the innermost array, but no need.)  */
4822	    if (TREE_CODE (exp) == COMPONENT_REF
4823		|| TREE_CODE (exp) == BIT_FIELD_REF)
4824	      alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4825			   / BITS_PER_UNIT);
4826	  }
4827
4828	/* Don't forget about volatility even if this is a bitfield.  */
4829	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4830	  {
4831	    op0 = copy_rtx (op0);
4832	    MEM_VOLATILE_P (op0) = 1;
4833	  }
4834
4835	/* In cases where an aligned union has an unaligned object
4836	   as a field, we might be extracting a BLKmode value from
4837	   an integer-mode (e.g., SImode) object.  Handle this case
4838	   by doing the extract into an object as wide as the field
4839	   (which we know to be the width of a basic mode), then
4840	   storing into memory, and changing the mode to BLKmode.  */
4841	if (mode1 == VOIDmode
4842	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4843	    || (modifier != EXPAND_CONST_ADDRESS
4844		&& modifier != EXPAND_SUM
4845		&& modifier != EXPAND_INITIALIZER
4846		&& ((mode1 != BLKmode && ! direct_load[(int) mode1])
4847		    /* If the field isn't aligned enough to fetch as a memref,
4848		       fetch it as a bit field.  */
4849		    || (SLOW_UNALIGNED_ACCESS
4850			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4851			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4852	  {
4853	    enum machine_mode ext_mode = mode;
4854
4855	    if (ext_mode == BLKmode)
4856	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4857
4858	    if (ext_mode == BLKmode)
4859	      abort ();
4860
4861	    op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4862				     unsignedp, target, ext_mode, ext_mode,
4863				     alignment,
4864				     int_size_in_bytes (TREE_TYPE (tem)));
4865	    if (mode == BLKmode)
4866	      {
4867		rtx new = assign_stack_temp (ext_mode,
4868					     bitsize / BITS_PER_UNIT, 0);
4869
4870		emit_move_insn (new, op0);
4871		op0 = copy_rtx (new);
4872		PUT_MODE (op0, BLKmode);
4873		MEM_IN_STRUCT_P (op0) = 1;
4874	      }
4875
4876	    return op0;
4877	  }
4878
4879	/* Get a reference to just this component.  */
4880	if (modifier == EXPAND_CONST_ADDRESS
4881	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4882	  op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4883						    (bitpos / BITS_PER_UNIT)));
4884	else
4885	  op0 = change_address (op0, mode1,
4886				plus_constant (XEXP (op0, 0),
4887					       (bitpos / BITS_PER_UNIT)));
4888	MEM_IN_STRUCT_P (op0) = 1;
4889	MEM_VOLATILE_P (op0) |= volatilep;
4890	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4891	  return op0;
4892	if (target == 0)
4893	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4894	convert_move (target, op0, unsignedp);
4895	return target;
4896      }
4897
4898    case OFFSET_REF:
4899      {
4900	tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4901	tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4902	op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4903	temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4904	MEM_IN_STRUCT_P (temp) = 1;
4905	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4906#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4907	 a location is accessed through a pointer to const does not mean
4908	 that the value there can never change.  */
4909	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4910#endif
4911	return temp;
4912      }
4913
4914      /* Intended for a reference to a buffer of a file-object in Pascal.
4915	 But it's not certain that a special tree code will really be
4916	 necessary for these.  INDIRECT_REF might work for them.  */
4917    case BUFFER_REF:
4918      abort ();
4919
4920    case IN_EXPR:
4921      {
4922	/* Pascal set IN expression.
4923
4924	   Algorithm:
4925	       rlo       = set_low - (set_low%bits_per_unit);
4926	       the_word  = set [ (index - rlo)/bits_per_unit ];
4927	       bit_index = index % bits_per_unit;
4928	       bitmask   = 1 << bit_index;
4929	       return !!(the_word & bitmask);  */
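
	/* A hedged worked example (not in the original): for a set with
	   low bound 10 and 8-bit units, rlo = 10 - (10 % 8) = 8, so
	   testing "19 IN s" computes
	       the_byte  = set [ (19 - 8)/8 ]  =  set [1];
	       bitmask   = 1 << (19 % 8)       =  1 << 3;
	   and returns !!(set [1] & (1 << 3)).  */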
4930
4931	tree set = TREE_OPERAND (exp, 0);
4932	tree index = TREE_OPERAND (exp, 1);
4933	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4934	tree set_type = TREE_TYPE (set);
4935	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4936	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4937	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4938	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4939	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4940	rtx setval = expand_expr (set, 0, VOIDmode, 0);
4941	rtx setaddr = XEXP (setval, 0);
4942	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4943	rtx rlow;
4944	rtx diff, quo, rem, addr, bit, result;
4945
4946	preexpand_calls (exp);
4947
4948	/* If domain is empty, answer is no.  Likewise if index is constant
4949	   and out of bounds.  */
4950	if (((TREE_CODE (set_high_bound) == INTEGER_CST
4951	      && TREE_CODE (set_low_bound) == INTEGER_CST
4952	      && tree_int_cst_lt (set_high_bound, set_low_bound))
4953	     || (TREE_CODE (index) == INTEGER_CST
4954		 && TREE_CODE (set_low_bound) == INTEGER_CST
4955		 && tree_int_cst_lt (index, set_low_bound))
4956	     || (TREE_CODE (set_high_bound) == INTEGER_CST
4957		 && TREE_CODE (index) == INTEGER_CST
4958		 && tree_int_cst_lt (set_high_bound, index))))
4959	  return const0_rtx;
4960
4961	if (target == 0)
4962	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4963
4964	/* If we get here, we have to generate the code for both cases
4965	   (in range and out of range).  */
4966
4967	op0 = gen_label_rtx ();
4968	op1 = gen_label_rtx ();
4969
4970	if (! (GET_CODE (index_val) == CONST_INT
4971	       && GET_CODE (lo_r) == CONST_INT))
4972	  {
4973	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4974			   GET_MODE (index_val), iunsignedp, 0);
4975	    emit_jump_insn (gen_blt (op1));
4976	  }
4977
4978	if (! (GET_CODE (index_val) == CONST_INT
4979	       && GET_CODE (hi_r) == CONST_INT))
4980	  {
4981	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4982			   GET_MODE (index_val), iunsignedp, 0);
4983	    emit_jump_insn (gen_bgt (op1));
4984	  }
4985
4986	/* Calculate the element number of bit zero in the first byte
4987	   of the set.  */
4988	if (GET_CODE (lo_r) == CONST_INT)
4989	  rlow = GEN_INT (INTVAL (lo_r)
4990			  & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
4991	else
4992	  rlow = expand_binop (index_mode, and_optab, lo_r,
4993			       GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
4994			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4995
4996	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4997			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4998
4999	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5000			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5001	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5002			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5003
5004	addr = memory_address (byte_mode,
5005			       expand_binop (index_mode, add_optab, quo,
5006					     setaddr, NULL_RTX, iunsignedp,
5007					     OPTAB_LIB_WIDEN));
5008
5009	/* Extract the bit we want to examine.  */
5010	bit = expand_shift (RSHIFT_EXPR, byte_mode,
5011			    gen_rtx (MEM, byte_mode, addr),
5012			    make_tree (TREE_TYPE (index), rem),
5013			    NULL_RTX, 1);
5014	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5015			       GET_MODE (target) == byte_mode ? target : 0,
5016			       1, OPTAB_LIB_WIDEN);
5017
5018	if (result != target)
5019	  convert_move (target, result, 1);
5020
5021	/* Output the code to handle the out-of-range case.  */
5022	emit_jump (op0);
5023	emit_label (op1);
5024	emit_move_insn (target, const0_rtx);
5025	emit_label (op0);
5026	return target;
5027      }
5028
5029    case WITH_CLEANUP_EXPR:
5030      if (RTL_EXPR_RTL (exp) == 0)
5031	{
5032	  RTL_EXPR_RTL (exp)
5033	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5034	  cleanups_this_call
5035	    = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5036	  /* That's it for this cleanup.  */
5037	  TREE_OPERAND (exp, 2) = 0;
5038	  (*interim_eh_hook) (NULL_TREE);
5039	}
5040      return RTL_EXPR_RTL (exp);
5041
5042    case CLEANUP_POINT_EXPR:
5043      {
5044	extern int temp_slot_level;
5045	tree old_cleanups = cleanups_this_call;
5046	int old_temp_level = target_temp_slot_level;
5047	push_temp_slots ();
5048	target_temp_slot_level = temp_slot_level;
5049	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5050	/* If we're going to use this value, load it up now.  */
5051	if (! ignore)
5052	  op0 = force_not_mem (op0);
5053	expand_cleanups_to (old_cleanups);
5054	preserve_temp_slots (op0);
5055	free_temp_slots ();
5056	pop_temp_slots ();
5057	target_temp_slot_level = old_temp_level;
5058      }
5059      return op0;
5060
5061    case CALL_EXPR:
5062      /* Check for a built-in function.  */
5063      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5064	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5065	      == FUNCTION_DECL)
5066	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5067	return expand_builtin (exp, target, subtarget, tmode, ignore);
5068
5069      /* If this call was expanded already by preexpand_calls,
5070	 just return the result we got.  */
5071      if (CALL_EXPR_RTL (exp) != 0)
5072	return CALL_EXPR_RTL (exp);
5073
5074      return expand_call (exp, target, ignore);
5075
5076    case NON_LVALUE_EXPR:
5077    case NOP_EXPR:
5078    case CONVERT_EXPR:
5079    case REFERENCE_EXPR:
5080      if (TREE_CODE (type) == UNION_TYPE)
5081	{
5082	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5083	  if (target == 0)
5084	    {
5085	      if (mode == BLKmode)
5086		{
5087		  if (TYPE_SIZE (type) == 0
5088		      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5089		    abort ();
5090		  target = assign_stack_temp (BLKmode,
5091					      (TREE_INT_CST_LOW (TYPE_SIZE (type))
5092					       + BITS_PER_UNIT - 1)
5093					      / BITS_PER_UNIT, 0);
5094		  MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5095		}
5096	      else
5097		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5098	    }
5099
5100	  if (GET_CODE (target) == MEM)
5101	    /* Store data into beginning of memory target.  */
5102	    store_expr (TREE_OPERAND (exp, 0),
5103			change_address (target, TYPE_MODE (valtype), 0), 0);
5104
5105	  else if (GET_CODE (target) == REG)
5106	    /* Store this field into a union of the proper type.  */
5107	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5108			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5109			 VOIDmode, 0, 1,
5110			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5111	  else
5112	    abort ();
5113
5114	  /* Return the entire union.  */
5115	  return target;
5116	}
5117
5118      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5119	{
5120	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5121			     modifier);
5122
5123	  /* If the signedness of the conversion differs and OP0 is
5124	     a promoted SUBREG, clear that indication since we now
5125	     have to do the proper extension.  */
5126	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5127	      && GET_CODE (op0) == SUBREG)
5128	    SUBREG_PROMOTED_VAR_P (op0) = 0;
5129
5130	  return op0;
5131	}
5132
5133      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5134      if (GET_MODE (op0) == mode)
5135	return op0;
5136
5137      /* If OP0 is a constant, just convert it into the proper mode.  */
5138      if (CONSTANT_P (op0))
5139	return
5140	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5141			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5142
5143      if (modifier == EXPAND_INITIALIZER)
5144	return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5145
5146      if (flag_force_mem && GET_CODE (op0) == MEM)
5147	op0 = copy_to_reg (op0);
5148
5149      if (target == 0)
5150	return
5151	  convert_to_mode (mode, op0,
5152			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5153      else
5154	convert_move (target, op0,
5155		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5156      return target;
5157
5158    case PLUS_EXPR:
5159      /* We come here from MINUS_EXPR when the second operand is a constant. */
5160    plus_expr:
5161      this_optab = add_optab;
5162
5163      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5164	 something else, make sure we add the register to the constant and
5165	 then to the other thing.  This case can occur during strength
5166	 reduction and doing it this way will produce better code if the
5167	 frame pointer or argument pointer is eliminated.
5168
5169	 fold-const.c will ensure that the constant is always in the inner
5170	 PLUS_EXPR, so the only case we need to do anything about is if
5171	 sp, ap, or fp is our second argument, in which case we must swap
5172	 the innermost first argument and our second argument.  */
5173
5174      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5175	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5176	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5177	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5178	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5179	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5180	{
5181	  tree t = TREE_OPERAND (exp, 1);
5182
5183	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5184	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5185	}
5186
5187      /* If the result is to be ptr_mode and we are adding an integer to
5188	 something, we might be forming a constant.  So try to use
5189	 plus_constant.  If it produces a sum and we can't accept it,
5190	 use force_operand.  This allows P = &ARR[const] to generate
5191	 efficient code on machines where a SYMBOL_REF is not a valid
5192	 address.
5193
5194	 If this is an EXPAND_SUM call, always return the sum.  */
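
      /* A hedged illustration (not in the original source): for
	     static int arr[10];  int *p = &arr[3];
	 (4-byte ints assumed), expanding the address with EXPAND_SUM
	 gives (plus (symbol_ref "arr") (const_int 12)), which
	 plus_constant folds into a single CONST address even on machines
	 where a bare SYMBOL_REF is not a valid address.  */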
5195      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5196	  || mode == ptr_mode)
5197	{
5198	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5199	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5200	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5201	    {
5202	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5203				 EXPAND_SUM);
5204	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5205	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5206		op1 = force_operand (op1, target);
5207	      return op1;
5208	    }
5209
5210	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5211		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5212		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5213	    {
5214	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5215				 EXPAND_SUM);
5216	      if (! CONSTANT_P (op0))
5217		{
5218		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5219				     VOIDmode, modifier);
5220		  /* Don't go to both_summands if modifier
5221		     says it's not right to return a PLUS.  */
5222		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5223		    goto binop2;
5224		  goto both_summands;
5225		}
5226	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5227	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5228		op0 = force_operand (op0, target);
5229	      return op0;
5230	    }
5231	}
5232
5233      /* No sense saving up arithmetic to be done
5234	 if it's all in the wrong mode to form part of an address.
5235	 And force_operand won't know whether to sign-extend or
5236	 zero-extend.  */
5237      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5238	  || mode != ptr_mode)
5239	goto binop;
5240
5241      preexpand_calls (exp);
5242      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5243	subtarget = 0;
5244
5245      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5246      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5247
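      /* A hedged illustration (not in the original): when expanding
	 (a + 3) + b, the code below reassociates the operands to
	 (plus (plus A B) (const_int 3)) so that the constant can end
	 up as an address displacement.  */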
5248    both_summands:
5249      /* Make sure any term that's a sum with a constant comes last.  */
5250      if (GET_CODE (op0) == PLUS
5251	  && CONSTANT_P (XEXP (op0, 1)))
5252	{
5253	  temp = op0;
5254	  op0 = op1;
5255	  op1 = temp;
5256	}
5257      /* If adding to a sum including a constant,
5258	 associate it to put the constant outside.  */
5259      if (GET_CODE (op1) == PLUS
5260	  && CONSTANT_P (XEXP (op1, 1)))
5261	{
5262	  rtx constant_term = const0_rtx;
5263
5264	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5265	  if (temp != 0)
5266	    op0 = temp;
5267	  /* Ensure that MULT comes first if there is one.  */
5268	  else if (GET_CODE (op0) == MULT)
5269	    op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5270	  else
5271	    op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5272
5273	  /* Let's also eliminate constants from op0 if possible.  */
5274	  op0 = eliminate_constant_term (op0, &constant_term);
5275
5276	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5277	     their sum should be a constant.  Form it into OP1, since the
5278	     result we want will then be OP0 + OP1.  */
5279
5280	  temp = simplify_binary_operation (PLUS, mode, constant_term,
5281					    XEXP (op1, 1));
5282	  if (temp != 0)
5283	    op1 = temp;
5284	  else
5285	    op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5286	}
5287
5288      /* Put a constant term last and put a multiplication first.  */
5289      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5290	temp = op1, op1 = op0, op0 = temp;
5291
5292      temp = simplify_binary_operation (PLUS, mode, op0, op1);
5293      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5294
5295    case MINUS_EXPR:
5296	      /* For the sake of an initializer, we are allowed to return
5297	 a MINUS of two symbolic constants.  Here we handle all the
5298	 cases where both operands are constant.  */
5301      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5302	  && really_constant_p (TREE_OPERAND (exp, 0))
5303	  && really_constant_p (TREE_OPERAND (exp, 1)))
5304	{
5305	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5306				 VOIDmode, modifier);
5307	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5308				 VOIDmode, modifier);
5309
5310	  /* If the last operand is a CONST_INT, use plus_constant of
5311	     the negated constant.  Else make the MINUS.  */
5312	  if (GET_CODE (op1) == CONST_INT)
5313	    return plus_constant (op0, - INTVAL (op1));
5314	  else
5315	    return gen_rtx (MINUS, mode, op0, op1);
5316	}
5317      /* Convert A - const to A + (-const).  */
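      /* A hedged illustration (not in the original source): "x - 5" is
	 rebuilt as "x + (-5)" and expanded through the PLUS_EXPR path
	 above.  When TYPE is unsigned, or negating the constant
	 overflows (e.g. the most negative int), the sum is formed in
	 the corresponding signed type instead; if even that negation
	 overflows, we fall back to a plain subtract.  */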
5318      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5319	{
5320	  tree negated = fold (build1 (NEGATE_EXPR, type,
5321				       TREE_OPERAND (exp, 1)));
5322
5323	  /* Deal with the case where we can't negate the constant
5324	     in TYPE.  */
5325	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5326	    {
5327	      tree newtype = signed_type (type);
5328	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5329	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5330	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5331
5332	      if (! TREE_OVERFLOW (newneg))
5333		return expand_expr (convert (type,
5334					     build (PLUS_EXPR, newtype,
5335						    newop0, newneg)),
5336				    target, tmode, modifier);
5337	    }
5338	  else
5339	    {
5340	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5341	      goto plus_expr;
5342	    }
5343	}
5344      this_optab = sub_optab;
5345      goto binop;
5346
5347    case MULT_EXPR:
5348      preexpand_calls (exp);
5349      /* If first operand is constant, swap them.
5350	 Thus the following special case checks need only
5351	 check the second operand.  */
5352      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5353	{
5354	  register tree t1 = TREE_OPERAND (exp, 0);
5355	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5356	  TREE_OPERAND (exp, 1) = t1;
5357	}
5358
5359      /* Attempt to return something suitable for generating an
5360	 indexed address, for machines that support that.  */
5361
5362      if (modifier == EXPAND_SUM && mode == ptr_mode
5363	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5364	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5365	{
5366	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5367
5368	  /* Apply distributive law if OP0 is x+c.  */
5369	  if (GET_CODE (op0) == PLUS
5370	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5371	    return gen_rtx (PLUS, mode,
5372			    gen_rtx (MULT, mode, XEXP (op0, 0),
5373				     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5374			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5375				     * INTVAL (XEXP (op0, 1))));
5376
5377	  if (GET_CODE (op0) != REG)
5378	    op0 = force_operand (op0, NULL_RTX);
5379	  if (GET_CODE (op0) != REG)
5380	    op0 = copy_to_mode_reg (mode, op0);
5381
5382	  return gen_rtx (MULT, mode, op0,
5383			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5384	}
5385
5386      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5387	subtarget = 0;
5388
5389      /* Check for multiplying things that have been extended
5390	 from a narrower type.  If this machine supports multiplying
5391	 in that narrower type with a result in the desired type,
5392	 do it that way, and avoid the explicit type-conversion.  */
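      /* A hedged example (not in the original source): with 16-bit
	 shorts and 32-bit ints,
	     short a, b;  int p = a * b;
	 can use an HImode x HImode -> SImode widening multiply
	 (smul_widen_optab) instead of sign-extending both operands and
	 doing a full SImode multiply.  */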
5393      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5394	  && TREE_CODE (type) == INTEGER_TYPE
5395	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5396	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5397	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5398	       && int_fits_type_p (TREE_OPERAND (exp, 1),
5399				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5400	       /* Don't use a widening multiply if a shift will do.  */
5401	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5402		    > HOST_BITS_PER_WIDE_INT)
5403		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5404	      ||
5405	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5406	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5407		   ==
5408		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5409	       /* If both operands are extended, they must either both
5410		  be zero-extended or both be sign-extended.  */
5411	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5412		   ==
5413		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5414	{
5415	  enum machine_mode innermode
5416	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5417	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5418			? umul_widen_optab : smul_widen_optab);
5419	  if (mode == GET_MODE_WIDER_MODE (innermode)
5420	      && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5421	    {
5422	      op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5423				 NULL_RTX, VOIDmode, 0);
5424	      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5425		op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5426				   VOIDmode, 0);
5427	      else
5428		op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5429				   NULL_RTX, VOIDmode, 0);
5430	      goto binop2;
5431	    }
5432	}
5433      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5434      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5435      return expand_mult (mode, op0, op1, target, unsignedp);
5436
5437    case TRUNC_DIV_EXPR:
5438    case FLOOR_DIV_EXPR:
5439    case CEIL_DIV_EXPR:
5440    case ROUND_DIV_EXPR:
5441    case EXACT_DIV_EXPR:
5442      preexpand_calls (exp);
5443      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5444	subtarget = 0;
5445	      /* Possible optimization: compute the dividend with EXPAND_SUM;
5446	 then, if the divisor is constant, we can optimize the case where
5447	 some terms of the dividend have coefficients divisible by it.  */
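      /* E.g. (a hedged sketch, not in the original) (x*4 + y*8) / 4
	 could then be simplified to x + y*2, since each term's
	 coefficient is divisible by the constant divisor.  */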
5448      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5449      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5450      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5451
5452    case RDIV_EXPR:
5453      this_optab = flodiv_optab;
5454      goto binop;
5455
5456    case TRUNC_MOD_EXPR:
5457    case FLOOR_MOD_EXPR:
5458    case CEIL_MOD_EXPR:
5459    case ROUND_MOD_EXPR:
5460      preexpand_calls (exp);
5461      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5462	subtarget = 0;
5463      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5464      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5465      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5466
5467    case FIX_ROUND_EXPR:
5468    case FIX_FLOOR_EXPR:
5469    case FIX_CEIL_EXPR:
5470      abort ();			/* Not used for C.  */
5471
5472    case FIX_TRUNC_EXPR:
5473      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5474      if (target == 0)
5475	target = gen_reg_rtx (mode);
5476      expand_fix (target, op0, unsignedp);
5477      return target;
5478
5479    case FLOAT_EXPR:
5480      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5481      if (target == 0)
5482	target = gen_reg_rtx (mode);
5483      /* expand_float can't figure out what to do if FROM has VOIDmode.
5484	 So give it the correct mode.  With -O, cse will optimize this.  */
5485      if (GET_MODE (op0) == VOIDmode)
5486	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5487				op0);
5488      expand_float (target, op0,
5489		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5490      return target;
5491
5492    case NEGATE_EXPR:
5493      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5494      temp = expand_unop (mode, neg_optab, op0, target, 0);
5495      if (temp == 0)
5496	abort ();
5497      return temp;
5498
5499    case ABS_EXPR:
5500      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5501
5502      /* Handle complex values specially.  */
5503      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5504	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5505	return expand_complex_abs (mode, op0, target, unsignedp);
5506
5507      /* Unsigned abs is simply the operand.  Testing here means we don't
5508	 risk generating incorrect code below.  */
5509      if (TREE_UNSIGNED (type))
5510	return op0;
5511
5512      return expand_abs (mode, op0, target, unsignedp,
5513			 safe_from_p (target, TREE_OPERAND (exp, 0)));
5514
5515    case MAX_EXPR:
5516    case MIN_EXPR:
5517      target = original_target;
5518      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5519	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5520	  || GET_MODE (target) != mode
5521	  || (GET_CODE (target) == REG
5522	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
5523	target = gen_reg_rtx (mode);
5524      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5525      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5526
5527      /* First try to do it with a special MIN or MAX instruction.
5528	 If that does not win, use a conditional jump to select the proper
5529	 value.  */
5530      this_optab = (TREE_UNSIGNED (type)
5531		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
5532		    : (code == MIN_EXPR ? smin_optab : smax_optab));
5533
5534      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5535			   OPTAB_WIDEN);
5536      if (temp != 0)
5537	return temp;
5538
5539      /* At this point, a MEM target is no longer useful; we will get better
5540	 code without it.  */
5541
5542      if (GET_CODE (target) == MEM)
5543	target = gen_reg_rtx (mode);
5544
5545      if (target != op0)
5546	emit_move_insn (target, op0);
5547
5548      op0 = gen_label_rtx ();
5549
5550      /* If this mode is an integer too wide to compare properly,
5551	 compare word by word.  Rely on cse to optimize constant cases.  */
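      /* A hedged illustration (not in the original): a DImode MAX on a
	 32-bit target that cannot compare DImode directly is handled by
	 do_jump_by_parts_greater_rtx, which compares the operands one
	 word at a time, most significant word first.  */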
5552      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5553	{
5554	  if (code == MAX_EXPR)
5555	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5556					  target, op1, NULL_RTX, op0);
5557	  else
5558	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5559					  op1, target, NULL_RTX, op0);
5560	  emit_move_insn (target, op1);
5561	}
5562      else
5563	{
5564	  if (code == MAX_EXPR)
5565	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5566		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5567		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5568	  else
5569	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5570		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5571		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5572	  if (temp == const0_rtx)
5573	    emit_move_insn (target, op1);
5574	  else if (temp != const_true_rtx)
5575	    {
5576	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5577		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5578	      else
5579		abort ();
5580	      emit_move_insn (target, op1);
5581	    }
5582	}
5583      emit_label (op0);
5584      return target;
5585
5586    case BIT_NOT_EXPR:
5587      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5588      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5589      if (temp == 0)
5590	abort ();
5591      return temp;
5592
5593    case FFS_EXPR:
5594      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5595      temp = expand_unop (mode, ffs_optab, op0, target, 1);
5596      if (temp == 0)
5597	abort ();
5598      return temp;
5599
5600      /* ??? Can optimize bitwise operations with one arg constant.
5601	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5602	 and (a bitwise1 b) bitwise2 b (etc)
5603	 but that is probably not worthwhile.  */
5604
5605      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
5606	 boolean values when we want in all cases to compute both of them.  In
5607	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5608	 as actual zero-or-1 values and then bitwise anding.  In cases where
5609	 there cannot be any side effects, better code would be made by
5610	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5611	 how to recognize those cases.  */
5612
5613    case TRUTH_AND_EXPR:
5614    case BIT_AND_EXPR:
5615      this_optab = and_optab;
5616      goto binop;
5617
5618    case TRUTH_OR_EXPR:
5619    case BIT_IOR_EXPR:
5620      this_optab = ior_optab;
5621      goto binop;
5622
5623    case TRUTH_XOR_EXPR:
5624    case BIT_XOR_EXPR:
5625      this_optab = xor_optab;
5626      goto binop;
5627
5628    case LSHIFT_EXPR:
5629    case RSHIFT_EXPR:
5630    case LROTATE_EXPR:
5631    case RROTATE_EXPR:
5632      preexpand_calls (exp);
5633      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5634	subtarget = 0;
5635      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5636      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5637			   unsignedp);
5638
5639      /* Could determine the answer when only additive constants differ.  Also,
5640	 the addition of one can be handled by changing the condition.  */
5641    case LT_EXPR:
5642    case LE_EXPR:
5643    case GT_EXPR:
5644    case GE_EXPR:
5645    case EQ_EXPR:
5646    case NE_EXPR:
5647      preexpand_calls (exp);
5648      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5649      if (temp != 0)
5650	return temp;
5651
5652      /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
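      /* A hedged sketch of the emitted shape (not in the original):
	     temp = foo;
	     if (temp == 0) goto L1;
	     temp = 1;
	   L1:
	 so no general store-flag sequence is needed.  */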
5653      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5654	  && original_target
5655	  && GET_CODE (original_target) == REG
5656	  && (GET_MODE (original_target)
5657	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5658	{
5659	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5660			      VOIDmode, 0);
5661
5662	  if (temp != original_target)
5663	    temp = copy_to_reg (temp);
5664
5665	  op1 = gen_label_rtx ();
5666	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5667			 GET_MODE (temp), unsignedp, 0);
5668	  emit_jump_insn (gen_beq (op1));
5669	  emit_move_insn (temp, const1_rtx);
5670	  emit_label (op1);
5671	  return temp;
5672	}
5673
5674      /* If no set-flag instruction, must generate a conditional
5675	 store into a temporary variable.  Drop through
5676	 and handle this like && and ||.  */
5677
5678    case TRUTH_ANDIF_EXPR:
5679    case TRUTH_ORIF_EXPR:
5680      if (! ignore
5681	  && (target == 0 || ! safe_from_p (target, exp)
5682	      /* Make sure we don't have a hard reg (such as function's return
5683		 value) live across basic blocks, if not optimizing.  */
5684	      || (!optimize && GET_CODE (target) == REG
5685		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5686	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5687
5688      if (target)
5689	emit_clr_insn (target);
5690
5691      op1 = gen_label_rtx ();
5692      jumpifnot (exp, op1);
5693
5694      if (target)
5695	emit_0_to_1_insn (target);
5696
5697      emit_label (op1);
5698      return ignore ? const0_rtx : target;
5699
5700    case TRUTH_NOT_EXPR:
5701      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5702      /* The parser is careful to generate TRUTH_NOT_EXPR
5703	 only with operands that are always zero or one.  */
5704      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5705			   target, 1, OPTAB_LIB_WIDEN);
5706      if (temp == 0)
5707	abort ();
5708      return temp;
5709
5710    case COMPOUND_EXPR:
5711      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5712      emit_queue ();
5713      return expand_expr (TREE_OPERAND (exp, 1),
5714			  (ignore ? const0_rtx : target),
5715			  VOIDmode, 0);
5716
5717    case COND_EXPR:
5718      {
5719	rtx flag = NULL_RTX;
5720	tree left_cleanups = NULL_TREE;
5721	tree right_cleanups = NULL_TREE;
5722
5723	/* Used to save a pointer to the place to put the setting of
5724	   the flag that indicates if this side of the conditional was
5725	   taken.  We backpatch the code if we find out later that we
5726	   have any conditional cleanups that need to be performed.  */
5727	rtx dest_right_flag = NULL_RTX;
5728	rtx dest_left_flag = NULL_RTX;
5729
5730	/* Note that COND_EXPRs whose type is a structure or union
5731	   are required to be constructed to contain assignments to
5732	   a temporary variable, so that we can evaluate them here
5733	   for side effect only.  If type is void, we must do likewise.  */
5734
5735	/* If an arm of the branch requires a cleanup,
5736	   only that cleanup is performed.  */
5737
5738	tree singleton = 0;
5739	tree binary_op = 0, unary_op = 0;
5740	tree old_cleanups = cleanups_this_call;
5741
5742	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5743	   convert it to our mode, if necessary.  */
5744	if (integer_onep (TREE_OPERAND (exp, 1))
5745	    && integer_zerop (TREE_OPERAND (exp, 2))
5746	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5747	  {
5748	    if (ignore)
5749	      {
5750		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5751			     modifier);
5752		return const0_rtx;
5753	      }
5754
5755	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5756	    if (GET_MODE (op0) == mode)
5757	      return op0;
5758
5759	    if (target == 0)
5760	      target = gen_reg_rtx (mode);
5761	    convert_move (target, op0, unsignedp);
5762	    return target;
5763	  }
5764
5765	/* If we are not to produce a result, we have no target.  Otherwise,
5766	   if a target was specified use it; it will not be used as an
5767	   intermediate target unless it is safe.  If no target, use a
5768	   temporary.  */
5769
5770	if (ignore)
5771	  temp = 0;
5772	else if (original_target
5773		 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5774		 && GET_MODE (original_target) == mode
5775		 && ! (GET_CODE (original_target) == MEM
5776		       && MEM_VOLATILE_P (original_target)))
5777	  temp = original_target;
5778	else if (mode == BLKmode)
5779	  {
5780	    if (TYPE_SIZE (type) == 0
5781		|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5782	      abort ();
5783
5784	    temp = assign_stack_temp (BLKmode,
5785				      (TREE_INT_CST_LOW (TYPE_SIZE (type))
5786				       + BITS_PER_UNIT - 1)
5787				      / BITS_PER_UNIT, 0);
5788	    MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5789	  }
5790	else
5791	  temp = gen_reg_rtx (mode);
5792
5793	/* Check for X ? A + B : A.  If we have this, we can copy
5794	   A to the output and conditionally add B.  Similarly for unary
5795	   operations.  Don't do this if X has side-effects because
5796	   those side effects might affect A or B and the "?" operation is
5797	   a sequence point in ANSI.  (We test for side effects later.)  */
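
	/* A hedged example (not in the original source): for
	       z = cond ? x + y : x;
	   SINGLETON is x and BINARY_OP is x + y, so x can be copied to
	   the output unconditionally and y added only when COND holds.  */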
5798
5799	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5800	    && operand_equal_p (TREE_OPERAND (exp, 2),
5801				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5802	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5803	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5804		 && operand_equal_p (TREE_OPERAND (exp, 1),
5805				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5806	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5807	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5808		 && operand_equal_p (TREE_OPERAND (exp, 2),
5809				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5810	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5811	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5812		 && operand_equal_p (TREE_OPERAND (exp, 1),
5813				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5814	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5815
5816	/* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5817	   operation, do this as A + (X != 0).  Similarly for other simple
5818	   binary operators.  */
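	/* E.g. (a hedged illustration, not in the original)
	       z = cond ? a + 1 : a;
	   becomes  z = a + (cond != 0)  when the condition can be
	   computed as a store-flag operation.  */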
5819	if (temp && singleton && binary_op
5820	    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5821	    && (TREE_CODE (binary_op) == PLUS_EXPR
5822		|| TREE_CODE (binary_op) == MINUS_EXPR
5823		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
5824		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
5825	    && integer_onep (TREE_OPERAND (binary_op, 1))
5826	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5827	  {
5828	    rtx result;
5829	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5830			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5831			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5832			    : xor_optab);
5833
5834	    /* If we had X ? A : A + 1, do this as A + (X == 0).
5835
5836	       We have to invert the truth value here and then put it
5837	       back later if do_store_flag fails.  We cannot simply copy
5838	       TREE_OPERAND (exp, 0) to another variable and modify that
5839	       because invert_truthvalue can modify the tree pointed to
5840	       by its argument.  */
5841	    if (singleton == TREE_OPERAND (exp, 1))
5842	      TREE_OPERAND (exp, 0)
5843		= invert_truthvalue (TREE_OPERAND (exp, 0));
5844
5845	    result = do_store_flag (TREE_OPERAND (exp, 0),
5846				    (safe_from_p (temp, singleton)
5847				     ? temp : NULL_RTX),
5848				    mode, BRANCH_COST <= 1);
5849
5850	    if (result)
5851	      {
5852		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5853		return expand_binop (mode, boptab, op1, result, temp,
5854				     unsignedp, OPTAB_LIB_WIDEN);
5855	      }
5856	    else if (singleton == TREE_OPERAND (exp, 1))
5857	      TREE_OPERAND (exp, 0)
5858		= invert_truthvalue (TREE_OPERAND (exp, 0));
5859	  }
5860
5861	do_pending_stack_adjust ();
5862	NO_DEFER_POP;
5863	op0 = gen_label_rtx ();
5864
5865	flag = gen_reg_rtx (word_mode);
5866	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5867	  {
5868	    if (temp != 0)
5869	      {
5870		/* If the target conflicts with the other operand of the
5871		   binary op, we can't use it.  Also, we can't use the target
5872		   if it is a hard register, because evaluating the condition
5873		   might clobber it.  */
5874		if ((binary_op
5875		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5876		    || (GET_CODE (temp) == REG
5877			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
5878		  temp = gen_reg_rtx (mode);
5879		store_expr (singleton, temp, 0);
5880	      }
5881	    else
5882	      expand_expr (singleton,
5883			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5884	    dest_left_flag = get_last_insn ();
5885	    if (singleton == TREE_OPERAND (exp, 1))
5886	      jumpif (TREE_OPERAND (exp, 0), op0);
5887	    else
5888	      jumpifnot (TREE_OPERAND (exp, 0), op0);
5889
5890	    /* Allows cleanups up to here. */
5891	    old_cleanups = cleanups_this_call;
5892	    if (binary_op && temp == 0)
5893	      /* Just touch the other operand.  */
5894	      expand_expr (TREE_OPERAND (binary_op, 1),
5895			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5896	    else if (binary_op)
5897	      store_expr (build (TREE_CODE (binary_op), type,
5898				 make_tree (type, temp),
5899				 TREE_OPERAND (binary_op, 1)),
5900			  temp, 0);
5901	    else
5902	      store_expr (build1 (TREE_CODE (unary_op), type,
5903				  make_tree (type, temp)),
5904			  temp, 0);
5905	    op1 = op0;
5906	    dest_right_flag = get_last_insn ();
5907	  }
5908#if 0
5909	/* This is now done in jump.c and is better done there because it
5910	   produces shorter register lifetimes.  */
5911
5912	/* Check for both possibilities either constants or variables
5913	   in registers (but not the same as the target!).  If so, can
5914	   save branches by assigning one, branching, and assigning the
5915	   other.  */
5916	else if (temp && GET_MODE (temp) != BLKmode
5917		 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5918		     || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5919			  || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5920			 && DECL_RTL (TREE_OPERAND (exp, 1))
5921			 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5922			 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5923		 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5924		     || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5925			  || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5926			 && DECL_RTL (TREE_OPERAND (exp, 2))
5927			 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5928			 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5929	  {
5930	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5931	      temp = gen_reg_rtx (mode);
5932	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
5933	    dest_left_flag = get_last_insn ();
5934	    jumpifnot (TREE_OPERAND (exp, 0), op0);
5935
5936	    /* Allows cleanups up to here. */
5937	    old_cleanups = cleanups_this_call;
5938	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
5939	    op1 = op0;
5940	    dest_right_flag = get_last_insn ();
5941	  }
5942#endif
5943	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5944	   comparison operator.  If we have one of these cases, set the
5945	   output to A, branch on A (cse will merge these two references),
5946	   then set the output to FOO.  */
5947	else if (temp
5948		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5949		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5950		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5951				     TREE_OPERAND (exp, 1), 0)
5952		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5953		 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5954	  {
5955	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5956	      temp = gen_reg_rtx (mode);
5957	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
5958	    dest_left_flag = get_last_insn ();
5959	    jumpif (TREE_OPERAND (exp, 0), op0);
5960
5961	    /* Allows cleanups up to here. */
5962	    old_cleanups = cleanups_this_call;
5963	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
5964	    op1 = op0;
5965	    dest_right_flag = get_last_insn ();
5966	  }
5967	else if (temp
5968		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5969		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5970		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5971				     TREE_OPERAND (exp, 2), 0)
5972		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5973		 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5974	  {
5975	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5976	      temp = gen_reg_rtx (mode);
5977	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
5978	    dest_left_flag = get_last_insn ();
5979	    jumpifnot (TREE_OPERAND (exp, 0), op0);
5980
5981	    /* Allows cleanups up to here. */
5982	    old_cleanups = cleanups_this_call;
5983	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
5984	    op1 = op0;
5985	    dest_right_flag = get_last_insn ();
5986	  }
5987	else
5988	  {
5989	    op1 = gen_label_rtx ();
5990	    jumpifnot (TREE_OPERAND (exp, 0), op0);
5991
5992	    /* Allows cleanups up to here. */
5993	    old_cleanups = cleanups_this_call;
5994	    if (temp != 0)
5995	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
5996	    else
5997	      expand_expr (TREE_OPERAND (exp, 1),
5998			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5999	    dest_left_flag = get_last_insn ();
6000
6001	    /* Handle conditional cleanups, if any. */
6002	    left_cleanups = defer_cleanups_to (old_cleanups);
6003
6004	    emit_queue ();
6005	    emit_jump_insn (gen_jump (op1));
6006	    emit_barrier ();
6007	    emit_label (op0);
6008	    if (temp != 0)
6009	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
6010	    else
6011	      expand_expr (TREE_OPERAND (exp, 2),
6012			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6013	    dest_right_flag = get_last_insn ();
6014	  }
6015
6016	/* Handle conditional cleanups, if any. */
6017	right_cleanups = defer_cleanups_to (old_cleanups);
6018
6019	emit_queue ();
6020	emit_label (op1);
6021	OK_DEFER_POP;
6022
6023	/* Add back in any conditional cleanups.  */
6024	if (left_cleanups || right_cleanups)
6025	  {
6026	    tree new_cleanups;
6027	    tree cond;
6028	    rtx last;
6029
6030	    /* Now that we know that a flag is needed, go back and add in the
6031	       setting of the flag. */
6032
6033	    /* Do the left side flag. */
6034	    last = get_last_insn ();
6035	    /* Flag left cleanups as needed. */
6036	    emit_move_insn (flag, const1_rtx);
6037	    /* ??? deprecated, use sequences instead.  */
6038	    reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6039
6040	    /* Do the right side flag. */
6041	    last = get_last_insn ();
6042	    /* Flag right cleanups as needed. */
6043	    emit_move_insn (flag, const0_rtx);
6044	    /* ??? deprecated, use sequences instead.  */
6045	    reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6046
6047	    /* convert flag, which is an rtx, into a tree. */
6048	    /* Convert FLAG, which is an rtx, into a tree.  */
6049	    TREE_TYPE (cond) = integer_type_node;
6050	    RTL_EXPR_RTL (cond) = flag;
6051	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6052	    cond = save_expr (cond);
6053
6054	    if (! left_cleanups)
6055	      left_cleanups = integer_zero_node;
6056	    if (! right_cleanups)
6057	      right_cleanups = integer_zero_node;
6058	    new_cleanups = build (COND_EXPR, void_type_node,
6059				  truthvalue_conversion (cond),
6060				  left_cleanups, right_cleanups);
6061	    new_cleanups = fold (new_cleanups);
6062
6063	    /* Now add in the conditionalized cleanups. */
6064	    cleanups_this_call
6065	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6066	    (*interim_eh_hook) (NULL_TREE);
6067	  }
6068	return temp;
6069      }
6070
6071    case TARGET_EXPR:
6072      {
6073	int need_exception_region = 0;
6074	/* Something needs to be initialized, but we didn't know
6075	   where that thing was when building the tree.  For example,
6076	   it could be the return value of a function, or a parameter
6077	   to a function which is passed on the stack, or a temporary
6078	   variable which must be passed by reference.
6079
6080	   We guarantee that the expression will either be constructed
6081	   or copied into our original target.  */
6082
6083	tree slot = TREE_OPERAND (exp, 0);
6084	tree exp1;
6085	rtx temp;
6086
6087	if (TREE_CODE (slot) != VAR_DECL)
6088	  abort ();
6089
6090	if (! ignore)
6091	  target = original_target;
6092
6093	if (target == 0)
6094	  {
6095	    if (DECL_RTL (slot) != 0)
6096	      {
6097		target = DECL_RTL (slot);
6098		/* We have already expanded the slot, so don't do
6099		   it again.  (mrs)  */
6100		if (TREE_OPERAND (exp, 1) == NULL_TREE)
6101		  return target;
6102	      }
6103	    else
6104	      {
6105		target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6106		MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6107		/* All temp slots at this level must not conflict.  */
6108		preserve_temp_slots (target);
6109		DECL_RTL (slot) = target;
6110
6111		/* Since SLOT is not known to the called function
6112		   to belong to its stack frame, we must build an explicit
6113		   cleanup.  This case occurs when we must build up a reference
6114		   to pass the reference as an argument.  In this case,
6115		   it is very likely that such a reference need not be
6116		   built here.  */
6117
6118		if (TREE_OPERAND (exp, 2) == 0)
6119		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6120		if (TREE_OPERAND (exp, 2))
6121		  {
6122		    cleanups_this_call = tree_cons (NULL_TREE,
6123						    TREE_OPERAND (exp, 2),
6124						    cleanups_this_call);
6125		    need_exception_region = 1;
6126		  }
6127	      }
6128	  }
6129	else
6130	  {
6131	    /* This case does occur when expanding a parameter which
6132	       needs to be constructed on the stack.  The target
6133	       is the actual stack address that we want to initialize.
6134	       The function we call will perform the cleanup in this case.  */
6135
6136	    /* If we have already assigned it space, use that space,
6137	       not the target we were passed, as our target
6138	       parameter is only a hint.  */
6139	    if (DECL_RTL (slot) != 0)
6140	      {
6141		target = DECL_RTL (slot);
6142		/* We have already expanded the slot, so don't do
6143		   it again.  (mrs)  */
6144		if (TREE_OPERAND (exp, 1) == NULL_TREE)
6145		  return target;
6146	      }
6147
6148	    DECL_RTL (slot) = target;
6149	  }
6150
6151	exp1 = TREE_OPERAND (exp, 1);
6152	/* Mark it as expanded.  */
6153	TREE_OPERAND (exp, 1) = NULL_TREE;
6154
6155	temp = expand_expr (exp1, target, tmode, modifier);
6156
6157	if (need_exception_region)
6158	  (*interim_eh_hook) (NULL_TREE);
6159
6160	return temp;
6161      }
6162
6163    case INIT_EXPR:
6164      {
6165	tree lhs = TREE_OPERAND (exp, 0);
6166	tree rhs = TREE_OPERAND (exp, 1);
6167	tree noncopied_parts = 0;
6168	tree lhs_type = TREE_TYPE (lhs);
6169
6170	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6171	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6172	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6173						  TYPE_NONCOPIED_PARTS (lhs_type));
6174	while (noncopied_parts != 0)
6175	  {
6176	    expand_assignment (TREE_VALUE (noncopied_parts),
6177			       TREE_PURPOSE (noncopied_parts), 0, 0);
6178	    noncopied_parts = TREE_CHAIN (noncopied_parts);
6179	  }
6180	return temp;
6181      }
6182
6183    case MODIFY_EXPR:
6184      {
6185	/* If lhs is complex, expand calls in rhs before computing it.
6186	   That's so we don't compute a pointer and save it over a call.
6187	   If lhs is simple, compute it first so we can give it as a
6188	   target if the rhs is just a call.  This avoids an extra temp and copy
6189	   and that prevents a partial-subsumption which makes bad code.
6190	   Actually we could treat component_ref's of vars like vars.  */
6191
6192	tree lhs = TREE_OPERAND (exp, 0);
6193	tree rhs = TREE_OPERAND (exp, 1);
6194	tree noncopied_parts = 0;
6195	tree lhs_type = TREE_TYPE (lhs);
6196
6197	temp = 0;
6198
6199	if (TREE_CODE (lhs) != VAR_DECL
6200	    && TREE_CODE (lhs) != RESULT_DECL
6201	    && TREE_CODE (lhs) != PARM_DECL)
6202	  preexpand_calls (exp);
6203
6204	/* Check for |= or &= of a bitfield of size one into another bitfield
6205	   of size 1.  In this case, (unless we need the result of the
6206	   assignment) we can do this more efficiently with a
6207	   test followed by an assignment, if necessary.
6208
6209	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
6210	   things change so we do, this code should be enhanced to
6211	   support it.  */
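	/* A hedged illustration (not from the original source): with
	       struct { unsigned a : 1, b : 1; } s;
	   the statement "s.a |= s.b;" (result ignored) emits a test of
	   s.b and, only when it is set, a store of 1 into s.a, instead
	   of a full read-modify-write of both bitfields.  */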
6212	if (ignore
6213	    && TREE_CODE (lhs) == COMPONENT_REF
6214	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
6215		|| TREE_CODE (rhs) == BIT_AND_EXPR)
6216	    && TREE_OPERAND (rhs, 0) == lhs
6217	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6218	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6219	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6220	  {
6221	    rtx label = gen_label_rtx ();
6222
6223	    do_jump (TREE_OPERAND (rhs, 1),
6224		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6225		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6226	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
6227					     (TREE_CODE (rhs) == BIT_IOR_EXPR
6228					      ? integer_one_node
6229					      : integer_zero_node)),
6230			       0, 0);
6231	    do_pending_stack_adjust ();
6232	    emit_label (label);
6233	    return const0_rtx;
6234	  }
6235
6236	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6237	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6238	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6239						  TYPE_NONCOPIED_PARTS (lhs_type));
6240
6241	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6242	while (noncopied_parts != 0)
6243	  {
6244	    expand_assignment (TREE_PURPOSE (noncopied_parts),
6245			       TREE_VALUE (noncopied_parts), 0, 0);
6246	    noncopied_parts = TREE_CHAIN (noncopied_parts);
6247	  }
6248	return temp;
6249      }
6250
6251    case PREINCREMENT_EXPR:
6252    case PREDECREMENT_EXPR:
6253      return expand_increment (exp, 0);
6254
6255    case POSTINCREMENT_EXPR:
6256    case POSTDECREMENT_EXPR:
6257      /* Faster to treat as pre-increment if result is not used.  */
6258      return expand_increment (exp, ! ignore);
6259
6260    case ADDR_EXPR:
6261      /* If nonzero, TEMP will be set to the address of something that might
6262	 be a MEM corresponding to a stack slot. */
6263      temp = 0;
6264
6265      /* Are we taking the address of a nested function?  */
6266      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6267	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6268	{
6269	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
6270	  op0 = force_operand (op0, target);
6271	}
6272      /* If we are taking the address of something erroneous, just
6273	 return a zero.  */
6274      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6275	return const0_rtx;
6276      else
6277	{
6278	  /* We make sure to pass const0_rtx down if we came in with
6279	     ignore set, to avoid doing the cleanups twice.  */
6280	  op0 = expand_expr (TREE_OPERAND (exp, 0),
6281			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
6282			     (modifier == EXPAND_INITIALIZER
6283			      ? modifier : EXPAND_CONST_ADDRESS));
6284
6285	  /* If we are going to ignore the result, OP0 will have been set
6286	     to const0_rtx, so just return it.  Don't get confused and
6287	     think we are taking the address of the constant.  */
6288	  if (ignore)
6289	    return op0;
6290
6291	  /* We would like the object in memory.  If it is a constant,
6292	     we can have it be statically allocated into memory.  For
6293	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6294	     memory and store the value into it.  */
6295
6296	  if (CONSTANT_P (op0))
6297	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6298				   op0);
6299	  else if (GET_CODE (op0) == MEM)
6300	    {
6301	      mark_temp_addr_taken (op0);
6302	      temp = XEXP (op0, 0);
6303	    }
6304
6305	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6306		   || GET_CODE (op0) == CONCAT)
6307	    {
6308	      /* If this object is in a register, it must not
6309		 be BLKmode.  */
6310	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6311	      enum machine_mode inner_mode = TYPE_MODE (inner_type);
6312	      rtx memloc
6313		= assign_stack_temp (inner_mode,
6314				     int_size_in_bytes (inner_type), 1);
6315	      MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6316
6317	      mark_temp_addr_taken (memloc);
6318	      emit_move_insn (memloc, op0);
6319	      op0 = memloc;
6320	    }
6321
6322	  if (GET_CODE (op0) != MEM)
6323	    abort ();
6324
6325	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6326	    {
6327	      temp = XEXP (op0, 0);
6328#ifdef POINTERS_EXTEND_UNSIGNED
6329	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6330		  && mode == ptr_mode)
6331		temp = convert_memory_address (ptr_mode, temp);
6332#endif
6333	      return temp;
6334	    }
6335
6336	  op0 = force_operand (XEXP (op0, 0), target);
6337	}
6338
6339      if (flag_force_addr && GET_CODE (op0) != REG)
6340	op0 = force_reg (Pmode, op0);
6341
6342      if (GET_CODE (op0) == REG)
6343	mark_reg_pointer (op0);
6344
6345      /* If we might have had a temp slot, add an equivalent address
6346	 for it.  */
6347      if (temp != 0)
6348	update_temp_slot_address (temp, op0);
6349
6350#ifdef POINTERS_EXTEND_UNSIGNED
6351      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6352	  && mode == ptr_mode)
6353	op0 = convert_memory_address (ptr_mode, op0);
6354#endif
6355
6356      return op0;
6357
6358    case ENTRY_VALUE_EXPR:
6359      abort ();
6360
6361    /* COMPLEX type for Extended Pascal & Fortran  */
6362    case COMPLEX_EXPR:
6363      {
6364	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6365	rtx insns;
6366
6367	/* Get the rtx code of the operands.  */
6368	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6369	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6370
6371	if (! target)
6372	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6373
6374	start_sequence ();
6375
6376	/* Move the real (op0) and imaginary (op1) parts to their location.  */
6377	emit_move_insn (gen_realpart (mode, target), op0);
6378	emit_move_insn (gen_imagpart (mode, target), op1);
6379
6380	insns = get_insns ();
6381	end_sequence ();
6382
6383	/* Complex construction should appear as a single unit.  */
6384	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6385	   each with a separate pseudo as destination.
6386	   It's not correct for flow to treat them as a unit.  */
6387	if (GET_CODE (target) != CONCAT)
6388	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6389	else
6390	  emit_insns (insns);
6391
6392	return target;
6393      }
6394
6395    case REALPART_EXPR:
6396      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6397      return gen_realpart (mode, op0);
6398
6399    case IMAGPART_EXPR:
6400      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6401      return gen_imagpart (mode, op0);
6402
6403    case CONJ_EXPR:
6404      {
6405	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6406	rtx imag_t;
6407	rtx insns;
6408
6409	op0  = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6410
6411	if (! target)
6412	  target = gen_reg_rtx (mode);
6413
6414	start_sequence ();
6415
6416	/* Store the realpart and the negated imagpart to target.  */
6417	emit_move_insn (gen_realpart (partmode, target),
6418			gen_realpart (partmode, op0));
6419
6420	imag_t = gen_imagpart (partmode, target);
6421	temp = expand_unop (partmode, neg_optab,
6422			       gen_imagpart (partmode, op0), imag_t, 0);
6423	if (temp != imag_t)
6424	  emit_move_insn (imag_t, temp);
6425
6426	insns = get_insns ();
6427	end_sequence ();
6428
6429	/* Conjugate should appear as a single unit.
6430	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6431	   each with a separate pseudo as destination.
6432	   It's not correct for flow to treat them as a unit.  */
6433	if (GET_CODE (target) != CONCAT)
6434	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6435	else
6436	  emit_insns (insns);
6437
6438	return target;
6439      }
6440
6441    case ERROR_MARK:
6442      op0 = CONST0_RTX (tmode);
6443      if (op0 != 0)
6444	return op0;
6445      return const0_rtx;
6446
6447    default:
6448      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6449    }
6450
6451  /* Here to do an ordinary binary operator, generating an instruction
6452     from the optab already placed in `this_optab'.  */
6453 binop:
6454  preexpand_calls (exp);
6455  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6456    subtarget = 0;
6457  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6458  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6459 binop2:
6460  temp = expand_binop (mode, this_optab, op0, op1, target,
6461		       unsignedp, OPTAB_LIB_WIDEN);
6462  if (temp == 0)
6463    abort ();
6464  return temp;
6465}
6466
6467
6468/* Emit bytecode to evaluate the given expression EXP, leaving its value on the evaluation stack.  */
6469void
6470bc_expand_expr (exp)
6471    tree exp;
6472{
6473  enum tree_code code;
6474  tree type, arg0;
6475  rtx r;
6476  struct binary_operator *binoptab;
6477  struct unary_operator *unoptab;
6478  struct increment_operator *incroptab;
6479  struct bc_label *lab, *lab1;
6480  enum bytecode_opcode opcode;
6481
6482
6483  code = TREE_CODE (exp);
6484
6485  switch (code)
6486    {
6487    case PARM_DECL:
6488
6489      if (DECL_RTL (exp) == 0)
6490	{
6491	  error_with_decl (exp, "prior parameter's size depends on `%s'");
6492	  return;
6493	}
6494
6495      bc_load_parmaddr (DECL_RTL (exp));
6496      bc_load_memory (TREE_TYPE (exp), exp);
6497
6498      return;
6499
6500    case VAR_DECL:
6501
6502      if (DECL_RTL (exp) == 0)
6503	abort ();
6504
6505#if 0
6506      if (BYTECODE_LABEL (DECL_RTL (exp)))
6507	bc_load_externaddr (DECL_RTL (exp));
6508      else
6509	bc_load_localaddr (DECL_RTL (exp));
6510#endif
6511      if (TREE_PUBLIC (exp))
6512	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6513			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6514      else
6515	bc_load_localaddr (DECL_RTL (exp));
6516
6517      bc_load_memory (TREE_TYPE (exp), exp);
6518      return;
6519
6520    case INTEGER_CST:
6521
6522#ifdef DEBUG_PRINT_CODE
6523      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6524#endif
6525      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6526					     ? SImode
6527					     : TYPE_MODE (TREE_TYPE (exp)))],
6528			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6529      return;
6530
6531    case REAL_CST:
6532
6533#if 0
6534#ifdef DEBUG_PRINT_CODE
6535      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6536#endif
6537      /* FIX THIS: find a better way to pass real_cst's. -bson */
6538      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6539			   (double) TREE_REAL_CST (exp));
6540#else
6541      abort ();
6542#endif
6543
6544      return;
6545
6546    case CALL_EXPR:
6547
6548      /* We build a call description vector describing the type of
6549	 the return value and of the arguments; this call vector,
6550	 together with a pointer to a location for the return value
6551	 and the base of the argument list, is passed to the low
6552	 level machine dependent call subroutine, which is responsible
6553	 for putting the arguments wherever real functions expect
6554	 them, as well as getting the return value back.  */
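      /* Layout sketch (the CODE/SIZE names stand for the values computed
	 below by bc_runtime_type_code and size_in_bytes): for a call
	 `f (i, d)' returning int, the finished vector holds
	     { 2,                          argument count
	       RET_CODE, RET_SIZE,         return value
	       I_CODE, I_SIZE,             first argument
	       D_CODE, D_SIZE }            second argument  */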
6555      {
6556	tree calldesc = 0, arg;
6557	int nargs = 0, i;
6558	rtx retval;
6559
6560	/* Push the evaluated args on the evaluation stack in reverse
6561	   order.  Also make an entry for each arg in the calldesc
6562	   vector while we're at it.  */
6563
6564	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6565
6566	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6567	  {
6568	    ++nargs;
6569	    bc_expand_expr (TREE_VALUE (arg));
6570
6571	    calldesc = tree_cons ((tree) 0,
6572				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6573				  calldesc);
6574	    calldesc = tree_cons ((tree) 0,
6575				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6576				  calldesc);
6577	  }
6578
6579	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6580
6581	/* Allocate a location for the return value and push its
6582	   address on the evaluation stack.  Also make an entry
6583	   at the front of the calldesc for the return value type. */
6584
6585	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6586	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6587	bc_load_localaddr (retval);
6588
6589	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6590	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6591
6592	/* Prepend the argument count.  */
6593	calldesc = tree_cons ((tree) 0,
6594			      build_int_2 (nargs, 0),
6595			      calldesc);
6596
6597	/* Push the address of the call description vector on the stack.  */
6598	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6599	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6600						 build_index_type (build_int_2 (nargs * 2, 0)));
6601	r = output_constant_def (calldesc);
6602	bc_load_externaddr (r);
6603
6604	/* Push the address of the function to be called. */
6605	bc_expand_expr (TREE_OPERAND (exp, 0));
6606
6607	/* Call the function, popping its address and the calldesc vector
6608	   address off the evaluation stack in the process.  */
6609	bc_emit_instruction (call);
6610
6611	/* Pop the arguments off the stack.  */
6612	bc_adjust_stack (nargs);
6613
6614	/* Load the return value onto the stack.  */
6615	bc_load_localaddr (retval);
6616	bc_load_memory (type, TREE_OPERAND (exp, 0));
6617      }
6618      return;
6619
6620    case SAVE_EXPR:
6621
6622      if (!SAVE_EXPR_RTL (exp))
6623	{
6624	  /* First time around: copy to local variable */
6625	  SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6626						   TYPE_ALIGN (TREE_TYPE(exp)));
6627	  bc_expand_expr (TREE_OPERAND (exp, 0));
6628	  bc_emit_instruction (duplicate);
6629
6630	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
6631	  bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6632	}
6633      else
6634	{
6635	  /* Consecutive reference: use saved copy */
6636	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
6637	  bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6638	}
6639      return;
6640
6641#if 0
6642      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6643	 how are they handled instead? */
6644    case LET_STMT:
6645
6646      TREE_USED (exp) = 1;
6647      bc_expand_expr (STMT_BODY (exp));
6648      return;
6649#endif
6650
6651    case NOP_EXPR:
6652    case CONVERT_EXPR:
6653
6654      bc_expand_expr (TREE_OPERAND (exp, 0));
6655      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6656      return;
6657
6658    case MODIFY_EXPR:
6659
6660      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6661      return;
6662
6663    case ADDR_EXPR:
6664
6665      bc_expand_address (TREE_OPERAND (exp, 0));
6666      return;
6667
6668    case INDIRECT_REF:
6669
6670      bc_expand_expr (TREE_OPERAND (exp, 0));
6671      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6672      return;
6673
6674    case ARRAY_REF:
6675
6676      bc_expand_expr (bc_canonicalize_array_ref (exp));
6677      return;
6678
6679    case COMPONENT_REF:
6680
6681      bc_expand_component_address (exp);
6682
6683      /* If we have a bitfield, generate a proper load */
6684      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6685      return;
6686
6687    case COMPOUND_EXPR:
6688
6689      bc_expand_expr (TREE_OPERAND (exp, 0));
6690      bc_emit_instruction (drop);
6691      bc_expand_expr (TREE_OPERAND (exp, 1));
6692      return;
6693
6694    case COND_EXPR:
6695
6696      bc_expand_expr (TREE_OPERAND (exp, 0));
6697      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6698      lab = bc_get_bytecode_label ();
6699      bc_emit_bytecode (xjumpifnot);
6700      bc_emit_bytecode_labelref (lab);
6701
6702#ifdef DEBUG_PRINT_CODE
6703      fputc ('\n', stderr);
6704#endif
6705      bc_expand_expr (TREE_OPERAND (exp, 1));
6706      lab1 = bc_get_bytecode_label ();
6707      bc_emit_bytecode (jump);
6708      bc_emit_bytecode_labelref (lab1);
6709
6710#ifdef DEBUG_PRINT_CODE
6711      fputc ('\n', stderr);
6712#endif
6713
6714      bc_emit_bytecode_labeldef (lab);
6715      bc_expand_expr (TREE_OPERAND (exp, 2));
6716      bc_emit_bytecode_labeldef (lab1);
6717      return;
6718
6719    case TRUTH_ANDIF_EXPR:
6720
6721      opcode = xjumpifnot;
6722      goto andorif;
6723
6724    case TRUTH_ORIF_EXPR:
6725
6726      opcode = xjumpif;
6727      goto andorif;
6728
6729    case PLUS_EXPR:
6730
6731      binoptab = optab_plus_expr;
6732      goto binop;
6733
6734    case MINUS_EXPR:
6735
6736      binoptab = optab_minus_expr;
6737      goto binop;
6738
6739    case MULT_EXPR:
6740
6741      binoptab = optab_mult_expr;
6742      goto binop;
6743
6744    case TRUNC_DIV_EXPR:
6745    case FLOOR_DIV_EXPR:
6746    case CEIL_DIV_EXPR:
6747    case ROUND_DIV_EXPR:
6748    case EXACT_DIV_EXPR:
6749
6750      binoptab = optab_trunc_div_expr;
6751      goto binop;
6752
6753    case TRUNC_MOD_EXPR:
6754    case FLOOR_MOD_EXPR:
6755    case CEIL_MOD_EXPR:
6756    case ROUND_MOD_EXPR:
6757
6758      binoptab = optab_trunc_mod_expr;
6759      goto binop;
6760
6761    case FIX_ROUND_EXPR:
6762    case FIX_FLOOR_EXPR:
6763    case FIX_CEIL_EXPR:
6764      abort ();			/* Not used for C.  */
6765
6766    case FIX_TRUNC_EXPR:
6767    case FLOAT_EXPR:
6768    case MAX_EXPR:
6769    case MIN_EXPR:
6770    case FFS_EXPR:
6771    case LROTATE_EXPR:
6772    case RROTATE_EXPR:
6773      abort ();			/* FIXME */
6774
6775    case RDIV_EXPR:
6776
6777      binoptab = optab_rdiv_expr;
6778      goto binop;
6779
6780    case BIT_AND_EXPR:
6781
6782      binoptab = optab_bit_and_expr;
6783      goto binop;
6784
6785    case BIT_IOR_EXPR:
6786
6787      binoptab = optab_bit_ior_expr;
6788      goto binop;
6789
6790    case BIT_XOR_EXPR:
6791
6792      binoptab = optab_bit_xor_expr;
6793      goto binop;
6794
6795    case LSHIFT_EXPR:
6796
6797      binoptab = optab_lshift_expr;
6798      goto binop;
6799
6800    case RSHIFT_EXPR:
6801
6802      binoptab = optab_rshift_expr;
6803      goto binop;
6804
6805    case TRUTH_AND_EXPR:
6806
6807      binoptab = optab_truth_and_expr;
6808      goto binop;
6809
6810    case TRUTH_OR_EXPR:
6811
6812      binoptab = optab_truth_or_expr;
6813      goto binop;
6814
6815    case LT_EXPR:
6816
6817      binoptab = optab_lt_expr;
6818      goto binop;
6819
6820    case LE_EXPR:
6821
6822      binoptab = optab_le_expr;
6823      goto binop;
6824
6825    case GE_EXPR:
6826
6827      binoptab = optab_ge_expr;
6828      goto binop;
6829
6830    case GT_EXPR:
6831
6832      binoptab = optab_gt_expr;
6833      goto binop;
6834
6835    case EQ_EXPR:
6836
6837      binoptab = optab_eq_expr;
6838      goto binop;
6839
6840    case NE_EXPR:
6841
6842      binoptab = optab_ne_expr;
6843      goto binop;
6844
6845    case NEGATE_EXPR:
6846
6847      unoptab = optab_negate_expr;
6848      goto unop;
6849
6850    case BIT_NOT_EXPR:
6851
6852      unoptab = optab_bit_not_expr;
6853      goto unop;
6854
6855    case TRUTH_NOT_EXPR:
6856
6857      unoptab = optab_truth_not_expr;
6858      goto unop;
6859
6860    case PREDECREMENT_EXPR:
6861
6862      incroptab = optab_predecrement_expr;
6863      goto increment;
6864
6865    case PREINCREMENT_EXPR:
6866
6867      incroptab = optab_preincrement_expr;
6868      goto increment;
6869
6870    case POSTDECREMENT_EXPR:
6871
6872      incroptab = optab_postdecrement_expr;
6873      goto increment;
6874
6875    case POSTINCREMENT_EXPR:
6876
6877      incroptab = optab_postincrement_expr;
6878      goto increment;
6879
6880    case CONSTRUCTOR:
6881
6882      bc_expand_constructor (exp);
6883      return;
6884
6885    case ERROR_MARK:
6886    case RTL_EXPR:
6887
6888      return;
6889
6890    case BIND_EXPR:
6891      {
6892	tree vars = TREE_OPERAND (exp, 0);
6893	int vars_need_expansion = 0;
6894
6895	/* Need to open a binding contour here because
6896	   if there are any cleanups they must be contained here.  */
6897	expand_start_bindings (0);
6898
6899	/* Mark the corresponding BLOCK for output.  */
6900	if (TREE_OPERAND (exp, 2) != 0)
6901	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6902
6903	/* If VARS have not yet been expanded, expand them now.  */
6904	while (vars)
6905	  {
6906	    if (DECL_RTL (vars) == 0)
6907	      {
6908		vars_need_expansion = 1;
6909		expand_decl (vars);
6910	      }
6911	    expand_decl_init (vars);
6912	    vars = TREE_CHAIN (vars);
6913	  }
6914
6915	bc_expand_expr (TREE_OPERAND (exp, 1));
6916
6917	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6918
6919	return;
6920      }
6921    }
6922
6923  abort ();
6924
6925 binop:
6926
6927  bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6928			      TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6929  return;
6930
6931
6932 unop:
6933
6934  bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6935  return;
6936
6937
6938 andorif:
6939
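  /* Short-circuit evaluation: compute op0 as a truth value and
     DUPLICATE it; the conditional jump below skips op1, leaving the
     copy as the result, while the fall-through path DROPs the copy
     and evaluates op1 instead.  */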
6940  bc_expand_expr (TREE_OPERAND (exp, 0));
6941  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6942  lab = bc_get_bytecode_label ();
6943
6944  bc_emit_instruction (duplicate);
6945  bc_emit_bytecode (opcode);
6946  bc_emit_bytecode_labelref (lab);
6947
6948#ifdef DEBUG_PRINT_CODE
6949  fputc ('\n', stderr);
6950#endif
6951
6952  bc_emit_instruction (drop);
6953
6954  bc_expand_expr (TREE_OPERAND (exp, 1));
6955  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6956  bc_emit_bytecode_labeldef (lab);
6957  return;
6958
6959
6960 increment:
6961
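  /* The increment amount arrives as operand 1 of the tree, so the
     sequence below is: push the quantum, convert it to the lvalue's
     type, push the lvalue's address, then emit a single increment
     instruction.  */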
6962  type = TREE_TYPE (TREE_OPERAND (exp, 0));
6963
6964  /* Push the quantum.  */
6965  bc_expand_expr (TREE_OPERAND (exp, 1));
6966
6967  /* Convert it to the lvalue's type.  */
6968  bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6969
6970  /* Push the address of the lvalue */
6971  bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6972
6973  /* Perform actual increment */
6974  bc_expand_increment (incroptab, type);
6975  return;
6976}
6977
6978/* Return the alignment in bits of EXP, a pointer-valued expression.
6979   But don't return more than MAX_ALIGN no matter what.
6980   The alignment returned is, by default, the alignment of the thing that
6981   EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6982
6983   Otherwise, look at the expression to see if we can do better, i.e., if the
6984   expression is actually pointing at an object whose alignment is tighter.  */
6985
6986static int
6987get_pointer_alignment (exp, max_align)
6988     tree exp;
6989     unsigned max_align;
6990{
6991  unsigned align, inner;
6992
6993  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6994    return 0;
6995
6996  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6997  align = MIN (align, max_align);
6998
6999  while (1)
7000    {
7001      switch (TREE_CODE (exp))
7002	{
7003	case NOP_EXPR:
7004	case CONVERT_EXPR:
7005	case NON_LVALUE_EXPR:
7006	  exp = TREE_OPERAND (exp, 0);
7007	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7008	    return align;
7009	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7010	  align = MIN (inner, max_align);
7011	  break;
7012
7013	case PLUS_EXPR:
7014	  /* If sum of pointer + int, restrict our maximum alignment to that
7015	     imposed by the integer.  If not, we can't do any better than
7016	     ALIGN.  */
7017	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7018	    return align;
7019
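	  /* Shrink MAX_ALIGN to the largest power of two that divides
	     the constant offset, scaled to bits: an offset of 2 bytes,
	     for example, cuts a 64-bit MAX_ALIGN down to 16 bits.  */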
7020	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7021		  & (max_align - 1))
7022		 != 0)
7023	    max_align >>= 1;
7024
7025	  exp = TREE_OPERAND (exp, 0);
7026	  break;
7027
7028	case ADDR_EXPR:
7029	  /* See what we are pointing at and look at its alignment.  */
7030	  exp = TREE_OPERAND (exp, 0);
7031	  if (TREE_CODE (exp) == FUNCTION_DECL)
7032	    align = FUNCTION_BOUNDARY;
7033	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7034	    align = DECL_ALIGN (exp);
7035#ifdef CONSTANT_ALIGNMENT
7036	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7037	    align = CONSTANT_ALIGNMENT (exp, align);
7038#endif
7039	  return MIN (align, max_align);
7040
7041	default:
7042	  return align;
7043	}
7044    }
7045}
7046
7047/* If ARG corresponds to a string constant, return the STRING_CST node
7048   and set *PTR_OFFSET to the offset into it; otherwise return 0.  */
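/* E.g. for the argument tree `"hello" + 2' this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to 2; for plain `"hello"' the
   offset is integer_zero_node.  */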
7049
7050static tree
7051string_constant (arg, ptr_offset)
7052     tree arg;
7053     tree *ptr_offset;
7054{
7055  STRIP_NOPS (arg);
7056
7057  if (TREE_CODE (arg) == ADDR_EXPR
7058      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7059    {
7060      *ptr_offset = integer_zero_node;
7061      return TREE_OPERAND (arg, 0);
7062    }
7063  else if (TREE_CODE (arg) == PLUS_EXPR)
7064    {
7065      tree arg0 = TREE_OPERAND (arg, 0);
7066      tree arg1 = TREE_OPERAND (arg, 1);
7067
7068      STRIP_NOPS (arg0);
7069      STRIP_NOPS (arg1);
7070
7071      if (TREE_CODE (arg0) == ADDR_EXPR
7072	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7073	{
7074	  *ptr_offset = arg1;
7075	  return TREE_OPERAND (arg0, 0);
7076	}
7077      else if (TREE_CODE (arg1) == ADDR_EXPR
7078	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7079	{
7080	  *ptr_offset = arg0;
7081	  return TREE_OPERAND (arg1, 0);
7082	}
7083    }
7084
7085  return 0;
7086}
7087
7088/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
7089   way, because it could contain a zero byte in the middle.
7090   way, because the string could contain a zero byte in the middle.
7091
7092   Unfortunately, string_constant can't access the values of const char
7093   arrays with initializers, so neither can we do so here.  */
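/* For example, c_strlen of the tree for "foo\0bar" yields 3, of
   "foo" + 1 yields 2, and of a string with a non-constant offset and
   an embedded zero byte yields 0, meaning "call strlen at runtime".  */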
7094
7095static tree
7096c_strlen (src)
7097     tree src;
7098{
7099  tree offset_node;
7100  int offset, max;
7101  char *ptr;
7102
7103  src = string_constant (src, &offset_node);
7104  if (src == 0)
7105    return 0;
7106  max = TREE_STRING_LENGTH (src);
7107  ptr = TREE_STRING_POINTER (src);
7108  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7109    {
7110      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7111	 compute the offset to the following null if we don't know where to
7112	 start searching for it.  */
7113      int i;
7114      for (i = 0; i < max; i++)
7115	if (ptr[i] == 0)
7116	  return 0;
7117      /* We don't know the starting offset, but we do know that the string
7118	 has no internal zero bytes.  We can assume that the offset falls
7119	 within the bounds of the string; otherwise, the programmer deserves
7120	 what he gets.  Subtract the offset from the length of the string,
7121	 and return that.  */
7122      /* This would perhaps not be valid if we were dealing with named
7123         arrays in addition to literal string constants.  */
7124      return size_binop (MINUS_EXPR, size_int (max), offset_node);
7125    }
7126
7127  /* We have a known offset into the string.  Start searching there for
7128     a null character.  */
7129  if (offset_node == 0)
7130    offset = 0;
7131  else
7132    {
7133      /* Did we get a long long offset?  If so, punt.  */
7134      if (TREE_INT_CST_HIGH (offset_node) != 0)
7135	return 0;
7136      offset = TREE_INT_CST_LOW (offset_node);
7137    }
7138  /* If the offset is known to be out of bounds, warn, and call strlen at
7139     runtime.  */
7140  if (offset < 0 || offset > max)
7141    {
7142      warning ("offset outside bounds of constant string");
7143      return 0;
7144    }
7145  /* Use strlen to search for the first zero byte.  Since any strings
7146     constructed with build_string will have nulls appended, we win even
7147     if we get handed something like (char[4])"abcd".
7148
7149     Since OFFSET is our starting index into the string, no further
7150     calculation is needed.  */
7151  return size_int (strlen (ptr + offset));
7152}
7153
7154rtx
7155expand_builtin_return_addr (fndecl_code, count, tem)
7156     enum built_in_function fndecl_code;
7157     int count;
7158     rtx tem;
7159{
7160  int i;
7161
7162  /* Some machines need special handling before we can access
7163     arbitrary frames.  For example, on the sparc, we must first flush
7164     all register windows to the stack.  */
7165#ifdef SETUP_FRAME_ADDRESSES
7166  SETUP_FRAME_ADDRESSES ();
7167#endif
7168
7169  /* On the sparc, the return address is not in the frame, it is in a
7170     register.  There is no way to access it off of the current frame
7171     pointer, but it can be accessed off the previous frame pointer by
7172     reading the value from the register window save area.  */
7173#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7174  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7175    count--;
7176#endif
7177
7178  /* Scan back COUNT frames to the specified frame.  */
7179  for (i = 0; i < count; i++)
7180    {
7181      /* Assume the dynamic chain pointer is in the word that the
7182	 frame address points to, unless otherwise specified.  */
7183#ifdef DYNAMIC_CHAIN_ADDRESS
7184      tem = DYNAMIC_CHAIN_ADDRESS (tem);
7185#endif
7186      tem = memory_address (Pmode, tem);
7187      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7188    }
7189
7190  /* For __builtin_frame_address, return what we've got.  */
7191  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7192    return tem;
7193
7194  /* For __builtin_return_address, get the return address from that
7195     frame.  */
7196#ifdef RETURN_ADDR_RTX
7197  tem = RETURN_ADDR_RTX (count, tem);
7198#else
7199  tem = memory_address (Pmode,
7200			plus_constant (tem, GET_MODE_SIZE (Pmode)));
7201  tem = gen_rtx (MEM, Pmode, tem);
7202#endif
7203  return tem;
7204}
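
#if 0
/* Illustrative source-level use of the machinery above; `1' asks for
   the caller's frame rather than our own.  */
static void *
callers_return_address ()
{
  return __builtin_return_address (1);
}
#endif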
7205
7206/* Expand an expression EXP that calls a built-in function,
7207   with result going to TARGET if that's convenient
7208   (and in mode MODE if that's convenient).
7209   SUBTARGET may be used as the target for computing one of EXP's operands.
7210   IGNORE is nonzero if the value is to be ignored.  */
7211
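/* Nonzero if NODE, the FUNCTION_DECL for a call, was spelled with its
   `__builtin_' name, e.g. `__builtin_strlen' rather than `strlen'.  */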
7212#define CALLED_AS_BUILT_IN(NODE) \
7213   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7214
7215static rtx
7216expand_builtin (exp, target, subtarget, mode, ignore)
7217     tree exp;
7218     rtx target;
7219     rtx subtarget;
7220     enum machine_mode mode;
7221     int ignore;
7222{
7223  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7224  tree arglist = TREE_OPERAND (exp, 1);
7225  rtx op0;
7226  rtx lab1, insns;
7227  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7228  optab builtin_optab;
7229
7230  switch (DECL_FUNCTION_CODE (fndecl))
7231    {
7232    case BUILT_IN_ABS:
7233    case BUILT_IN_LABS:
7234    case BUILT_IN_FABS:
7235      /* build_function_call changes these into ABS_EXPR.  */
7236      abort ();
7237
7238    case BUILT_IN_SIN:
7239    case BUILT_IN_COS:
7240      /* Treat these like sqrt, but only if the user asks for them. */
7241      if (! flag_fast_math)
7242	break;
7243    case BUILT_IN_FSQRT:
7244      /* If not optimizing, call the library function.  */
7245      if (! optimize)
7246	break;
7247
7248      if (arglist == 0
7249	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
7250	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7251	break;
7252
7253      /* Stabilize and compute the argument.  */
7254      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7255	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7256	{
7257	  exp = copy_node (exp);
7258	  arglist = copy_node (arglist);
7259	  TREE_OPERAND (exp, 1) = arglist;
7260	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7261	}
7262      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7263
7264      /* Make a suitable register to place result in.  */
7265      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7266
7267      emit_queue ();
7268      start_sequence ();
7269
7270      switch (DECL_FUNCTION_CODE (fndecl))
7271	{
7272	case BUILT_IN_SIN:
7273	  builtin_optab = sin_optab; break;
7274	case BUILT_IN_COS:
7275	  builtin_optab = cos_optab; break;
7276	case BUILT_IN_FSQRT:
7277	  builtin_optab = sqrt_optab; break;
7278	default:
7279	  abort ();
7280	}
7281
7282      /* Compute into TARGET.
7283	 Set TARGET to wherever the result comes back.  */
7284      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7285			    builtin_optab, op0, target, 0);
7286
7287      /* If we were unable to expand via the builtin, stop the
7288	 sequence (without outputting the insns) and break, causing
7289	 a call to the library function.  */
7290      if (target == 0)
7291	{
7292	  end_sequence ();
7293	  break;
7294        }
7295
7296      /* Check the results by default.  But if flag_fast_math is turned on,
7297	 then assume sqrt will always be called with valid arguments.  */
7298
7299      if (! flag_fast_math)
7300	{
7301	  /* Don't define the builtin FP instructions
7302	     if your machine is not IEEE.  */
7303	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7304	    abort ();
7305
7306	  lab1 = gen_label_rtx ();
7307
7308	  /* Test the result; if it is NaN, set errno=EDOM because
7309	     the argument was not in the domain.  */
7310	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7311	  emit_jump_insn (gen_beq (lab1));
7312
7313#ifdef TARGET_EDOM
7314	  {
7315#ifdef GEN_ERRNO_RTX
7316	    rtx errno_rtx = GEN_ERRNO_RTX;
7317#else
7318	    rtx errno_rtx
7319	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7320#endif
7321
7322	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7323	  }
7324#else
7325	  /* We can't set errno=EDOM directly; let the library call do it.
7326	     Pop the arguments right away in case the call gets deleted. */
7327	  NO_DEFER_POP;
7328	  expand_call (exp, target, 0);
7329	  OK_DEFER_POP;
7330#endif
7331
7332	  emit_label (lab1);
7333	}
7334
7335      /* Output the entire sequence. */
7336      insns = get_insns ();
7337      end_sequence ();
7338      emit_insns (insns);
7339
7340      return target;
7341
7342      /* __builtin_apply_args returns block of memory allocated on
7343	 the stack into which is stored the arg pointer, structure
7344	 value address, static chain, and all the registers that might
7345	 possibly be used in performing a function call.  The code is
7346	 moved to the start of the function so the incoming values are
7347	 saved.  */
7348    case BUILT_IN_APPLY_ARGS:
7349      /* Don't do __builtin_apply_args more than once in a function.
7350	 Save the result of the first call and reuse it.  */
7351      if (apply_args_value != 0)
7352	return apply_args_value;
7353      {
7354	/* When this function is called, it means that registers must be
7355	   saved on entry to this function.  So we migrate the
7356	   call to the first insn of this function.  */
7357	rtx temp;
7358	rtx seq;
7359
7360	start_sequence ();
7361	temp = expand_builtin_apply_args ();
7362	seq = get_insns ();
7363	end_sequence ();
7364
7365	apply_args_value = temp;
7366
7367	/* Put the sequence after the NOTE that starts the function.
7368	   If this is inside a SEQUENCE, make the outer-level insn
7369	   chain current, so the code is placed at the start of the
7370	   function.  */
7371	push_topmost_sequence ();
7372	emit_insns_before (seq, NEXT_INSN (get_insns ()));
7373	pop_topmost_sequence ();
7374	return temp;
7375      }
7376
7377      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7378	 FUNCTION with a copy of the parameters described by
7379	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
7380	 allocated on the stack into which is stored all the registers
7381	 that might possibly be used for returning the result of a
7382	 function.  ARGUMENTS is the value returned by
7383	 __builtin_apply_args.  ARGSIZE is the number of bytes of
7384	 arguments that must be copied.  ??? How should this value be
7385	 computed?  We'll also need a safe worst case value for varargs
7386	 functions.  */
7387    case BUILT_IN_APPLY:
7388      if (arglist == 0
7389	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7390	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7391	  || TREE_CHAIN (arglist) == 0
7392	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7393	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7394	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7395	return const0_rtx;
7396      else
7397	{
7398	  int i;
7399	  tree t;
7400	  rtx ops[3];
7401
7402	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7403	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7404
7405	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
7406	}
7407
7408      /* __builtin_return (RESULT) causes the function to return the
7409	 value described by RESULT.  RESULT is address of the block of
7410	 memory returned by __builtin_apply.  */
7411    case BUILT_IN_RETURN:
7412      if (arglist
7413	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7414	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7415	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7416					    NULL_RTX, VOIDmode, 0));
7417      return const0_rtx;
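
#if 0
      /* Illustrative source-level use of the three builtins together;
	 FN and the block size 64 are placeholders (see the ??? comment
	 above about computing a safe size).  */
      {
	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) fn, args, 64);

	__builtin_return (result);
      }
#endif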
7418
7419    case BUILT_IN_SAVEREGS:
7420      /* Don't do __builtin_saveregs more than once in a function.
7421	 Save the result of the first call and reuse it.  */
7422      if (saveregs_value != 0)
7423	return saveregs_value;
7424      {
7425	/* When this function is called, it means that registers must be
7426	   saved on entry to this function.  So we migrate the
7427	   call to the first insn of this function.  */
7428	rtx temp;
7429	rtx seq;
7430
7431	/* Now really call the function.  `expand_call' does not call
7432	   expand_builtin, so there is no danger of infinite recursion here.  */
7433	start_sequence ();
7434
7435#ifdef EXPAND_BUILTIN_SAVEREGS
7436	/* Do whatever the machine needs done in this case.  */
7437	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7438#else
7439	/* The register where the function returns its value
7440	   is likely to have something else in it, such as an argument.
7441	   So preserve that register around the call.  */
7442
7443	if (value_mode != VOIDmode)
7444	  {
7445	    rtx valreg = hard_libcall_value (value_mode);
7446	    rtx saved_valreg = gen_reg_rtx (value_mode);
7447
7448	    emit_move_insn (saved_valreg, valreg);
7449	    temp = expand_call (exp, target, ignore);
7450	    emit_move_insn (valreg, saved_valreg);
7451	  }
7452	else
7453	  /* Generate the call, putting the value in a pseudo.  */
7454	  temp = expand_call (exp, target, ignore);
7455#endif
7456
7457	seq = get_insns ();
7458	end_sequence ();
7459
7460	saveregs_value = temp;
7461
7462	/* Put the sequence after the NOTE that starts the function.
7463	   If this is inside a SEQUENCE, make the outer-level insn
7464	   chain current, so the code is placed at the start of the
7465	   function.  */
7466	push_topmost_sequence ();
7467	emit_insns_before (seq, NEXT_INSN (get_insns ()));
7468	pop_topmost_sequence ();
7469	return temp;
7470      }
7471
7472      /* __builtin_args_info (N) returns word N of the arg space info
7473	 for the current function.  The number and meanings of words
7474	 is controlled by the definition of CUMULATIVE_ARGS.  */
7475    case BUILT_IN_ARGS_INFO:
7476      {
7477	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7478	int i;
7479	int *word_ptr = (int *) &current_function_args_info;
7480	tree type, elts, result;
7481
7482	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7483	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7484		 __FILE__, __LINE__);
7485
7486	if (arglist != 0)
7487	  {
7488	    tree arg = TREE_VALUE (arglist);
7489	    if (TREE_CODE (arg) != INTEGER_CST)
7490	      error ("argument of `__builtin_args_info' must be constant");
7491	    else
7492	      {
7493		int wordnum = TREE_INT_CST_LOW (arg);
7494
7495		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7496		  error ("argument of `__builtin_args_info' out of range");
7497		else
7498		  return GEN_INT (word_ptr[wordnum]);
7499	      }
7500	  }
7501	else
7502	  error ("missing argument in `__builtin_args_info'");
7503
7504	return const0_rtx;
7505
7506#if 0
7507	for (elts = NULL_TREE, i = 0; i < nwords; i++)
7508	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7509
7510	type = build_array_type (integer_type_node,
7511				 build_index_type (build_int_2 (nwords, 0)));
7512	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7513	TREE_CONSTANT (result) = 1;
7514	TREE_STATIC (result) = 1;
7515	result = build (INDIRECT_REF, build_pointer_type (type), result);
7516	TREE_CONSTANT (result) = 1;
7517	return expand_expr (result, NULL_RTX, VOIDmode, 0);
7518#endif
7519      }
7520
7521      /* Return the address of the first anonymous stack arg.  */
7522    case BUILT_IN_NEXT_ARG:
7523      {
7524	tree fntype = TREE_TYPE (current_function_decl);
7525
7526	if ((TYPE_ARG_TYPES (fntype) == 0
7527	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7528		 == void_type_node))
7529	    && ! current_function_varargs)
7530	  {
7531	    error ("`va_start' used in function with fixed args");
7532	    return const0_rtx;
7533	  }
7534
7535	if (arglist)
7536	  {
7537	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7538	    tree arg = TREE_VALUE (arglist);
7539
7540	    /* Strip off all nops for the sake of the comparison.  This
7541	       is not quite the same as STRIP_NOPS.  It does more.  */
7542	    while (TREE_CODE (arg) == NOP_EXPR
7543		   || TREE_CODE (arg) == CONVERT_EXPR
7544		   || TREE_CODE (arg) == NON_LVALUE_EXPR)
7545	      arg = TREE_OPERAND (arg, 0);
7546	    if (arg != last_parm)
7547	      warning ("second parameter of `va_start' not last named argument");
7548	  }
7549	else if (! current_function_varargs)
7550	  /* Evidently an out-of-date version of <stdarg.h>; can't validate
7551	     va_start's second argument, but can still work as intended.  */
7552	  warning ("`__builtin_next_arg' called without an argument");
7553      }
7554
7555      return expand_binop (Pmode, add_optab,
7556			   current_function_internal_arg_pointer,
7557			   current_function_arg_offset_rtx,
7558			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
7559
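      /* __builtin_classify_type maps the static type of its argument
	 to one of the *_type_class codes, e.g. integer_type_class for
	 `1' and pointer_type_class for `(char *) 0'.  */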
7560    case BUILT_IN_CLASSIFY_TYPE:
7561      if (arglist != 0)
7562	{
7563	  tree type = TREE_TYPE (TREE_VALUE (arglist));
7564	  enum tree_code code = TREE_CODE (type);
7565	  if (code == VOID_TYPE)
7566	    return GEN_INT (void_type_class);
7567	  if (code == INTEGER_TYPE)
7568	    return GEN_INT (integer_type_class);
7569	  if (code == CHAR_TYPE)
7570	    return GEN_INT (char_type_class);
7571	  if (code == ENUMERAL_TYPE)
7572	    return GEN_INT (enumeral_type_class);
7573	  if (code == BOOLEAN_TYPE)
7574	    return GEN_INT (boolean_type_class);
7575	  if (code == POINTER_TYPE)
7576	    return GEN_INT (pointer_type_class);
7577	  if (code == REFERENCE_TYPE)
7578	    return GEN_INT (reference_type_class);
7579	  if (code == OFFSET_TYPE)
7580	    return GEN_INT (offset_type_class);
7581	  if (code == REAL_TYPE)
7582	    return GEN_INT (real_type_class);
7583	  if (code == COMPLEX_TYPE)
7584	    return GEN_INT (complex_type_class);
7585	  if (code == FUNCTION_TYPE)
7586	    return GEN_INT (function_type_class);
7587	  if (code == METHOD_TYPE)
7588	    return GEN_INT (method_type_class);
7589	  if (code == RECORD_TYPE)
7590	    return GEN_INT (record_type_class);
7591	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7592	    return GEN_INT (union_type_class);
7593	  if (code == ARRAY_TYPE)
7594	    {
7595	      if (TYPE_STRING_FLAG (type))
7596		return GEN_INT (string_type_class);
7597	      else
7598		return GEN_INT (array_type_class);
7599	    }
7600	  if (code == SET_TYPE)
7601	    return GEN_INT (set_type_class);
7602	  if (code == FILE_TYPE)
7603	    return GEN_INT (file_type_class);
7604	  if (code == LANG_TYPE)
7605	    return GEN_INT (lang_type_class);
7606	}
7607      return GEN_INT (no_type_class);
7608
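      /* Folds to 1 only for literal constants and string addresses,
	 e.g. `__builtin_constant_p (3 + 4)' yields 1 once the operand
	 has been folded, while for a variable N it yields 0.  */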
7609    case BUILT_IN_CONSTANT_P:
7610      if (arglist == 0)
7611	return const0_rtx;
7612      else
7613	{
7614	  tree arg = TREE_VALUE (arglist);
7615
7616	  STRIP_NOPS (arg);
7617	  return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7618		  || (TREE_CODE (arg) == ADDR_EXPR
7619		      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7620		  ? const1_rtx : const0_rtx);
7621	}
7622
7623    case BUILT_IN_FRAME_ADDRESS:
7624      /* The argument must be a nonnegative integer constant.
7625	 It counts the number of frames to scan up the stack.
7626	 The value is the address of that frame.  */
7627    case BUILT_IN_RETURN_ADDRESS:
7628      /* The argument must be a nonnegative integer constant.
7629	 It counts the number of frames to scan up the stack.
7630	 The value is the return address saved in that frame.  */
7631      if (arglist == 0)
7632	/* Warning about missing arg was already issued.  */
7633	return const0_rtx;
7634      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7635	{
7636	  error ("invalid arg to `__builtin_return_address'");
7637	  return const0_rtx;
7638	}
7639      else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7640	{
7641	  error ("invalid arg to `__builtin_return_address'");
7642	  return const0_rtx;
7643	}
7644      else
7645	{
7646	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
7647						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
7648						hard_frame_pointer_rtx);
7649
7650	  /* For __builtin_frame_address, return what we've got.  */
7651	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7652	    return tem;
7653
7654	  if (GET_CODE (tem) != REG)
7655	    tem = copy_to_reg (tem);
7656	  return tem;
7657	}
7658
7659    case BUILT_IN_ALLOCA:
7660      if (arglist == 0
7661	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
7662	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7663	break;
7664
7665      /* Compute the argument.  */
7666      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7667
7668      /* Allocate the desired space.  */
7669      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7670
7671    case BUILT_IN_FFS:
7672      /* If not optimizing, call the library function.  */
7673      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7674	break;
7675
7676      if (arglist == 0
7677	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
7678	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7679	break;
7680
7681      /* Compute the argument.  */
7682      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7683      /* Compute ffs, into TARGET if possible.
7684	 Set TARGET to wherever the result comes back.  */
7685      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7686			    ffs_optab, op0, target, 1);
7687      if (target == 0)
7688	abort ();
7689      return target;
7690
7691    case BUILT_IN_STRLEN:
7692      /* If not optimizing, call the library function.  */
7693      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7694	break;
7695
7696      if (arglist == 0
7697	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7698	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7699	break;
7700      else
7701	{
7702	  tree src = TREE_VALUE (arglist);
7703	  tree len = c_strlen (src);
7704
7705	  int align
7706	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7707
7708	  rtx result, src_rtx, char_rtx;
7709	  enum machine_mode insn_mode = value_mode, char_mode;
7710	  enum insn_code icode;
7711
7712	  /* If the length is known, just return it. */
7713	  if (len != 0)
7714	    return expand_expr (len, target, mode, 0);
7715
7716	  /* If SRC is not a pointer type, don't do this operation inline. */
7717	  if (align == 0)
7718	    break;
7719
7720	  /* Call a function if we can't compute strlen in the right mode. */
7721
7722	  while (insn_mode != VOIDmode)
7723	    {
7724	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7725	      if (icode != CODE_FOR_nothing)
7726		break;
7727
7728	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7729	    }
7730	  if (insn_mode == VOIDmode)
7731	    break;
7732
7733	  /* Make a place to write the result of the instruction.  */
7734	  result = target;
7735	  if (! (result != 0
7736		 && GET_CODE (result) == REG
7737		 && GET_MODE (result) == insn_mode
7738		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7739	    result = gen_reg_rtx (insn_mode);
7740
7741	  /* Make sure the operands are acceptable to the predicates.  */
7742
7743	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7744	    result = gen_reg_rtx (insn_mode);
7745
7746	  src_rtx = memory_address (BLKmode,
7747				    expand_expr (src, NULL_RTX, ptr_mode,
7748						 EXPAND_NORMAL));
7749	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7750	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7751
7752	  char_rtx = const0_rtx;
7753	  char_mode = insn_operand_mode[(int)icode][2];
7754	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7755	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7756
7757	  emit_insn (GEN_FCN (icode) (result,
7758				      gen_rtx (MEM, BLKmode, src_rtx),
7759				      char_rtx, GEN_INT (align)));
7760
7761	  /* Return the value in the proper mode for this function.  */
7762	  if (GET_MODE (result) == value_mode)
7763	    return result;
7764	  else if (target != 0)
7765	    {
7766	      convert_move (target, result, 0);
7767	      return target;
7768	    }
7769	  else
7770	    return convert_to_mode (value_mode, result, 0);
7771	}
7772
7773    case BUILT_IN_STRCPY:
7774      /* If not optimizing, call the library function.  */
7775      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7776	break;
7777
7778      if (arglist == 0
7779	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7780	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7781	  || TREE_CHAIN (arglist) == 0
7782	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7783	break;
7784      else
7785	{
7786	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7787
7788	  if (len == 0)
7789	    break;
7790
7791	  len = size_binop (PLUS_EXPR, len, integer_one_node);
7792
7793	  chainon (arglist, build_tree_list (NULL_TREE, len));
7794	}
7795
7796      /* Falls through.  */
7797    case BUILT_IN_MEMCPY:
7798      /* If not optimizing, call the library function.  */
7799      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7800	break;
7801
7802      if (arglist == 0
7803	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7804	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7805	  || TREE_CHAIN (arglist) == 0
7806	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7807	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7808	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7809	break;
7810      else
7811	{
7812	  tree dest = TREE_VALUE (arglist);
7813	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
7814	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7815	  tree type;
7816
7817	  int src_align
7818	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7819	  int dest_align
7820	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7821	  rtx dest_rtx, dest_mem, src_mem;
7822
7823	  /* If either SRC or DEST is not a pointer type, don't do
7824	     this operation in-line.  */
7825	  if (src_align == 0 || dest_align == 0)
7826	    {
7827	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7828		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7829	      break;
7830	    }
7831
7832	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7833	  dest_mem = gen_rtx (MEM, BLKmode,
7834			      memory_address (BLKmode, dest_rtx));
7835	  /* There could be a void* cast on top of the object.  */
7836	  while (TREE_CODE (dest) == NOP_EXPR)
7837	    dest = TREE_OPERAND (dest, 0);
7838	  type = TREE_TYPE (TREE_TYPE (dest));
7839	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7840	  src_mem = gen_rtx (MEM, BLKmode,
7841			     memory_address (BLKmode,
7842					     expand_expr (src, NULL_RTX,
7843							  ptr_mode,
7844							  EXPAND_SUM)));
7845	  /* There could be a void* cast on top of the object.  */
7846	  while (TREE_CODE (src) == NOP_EXPR)
7847	    src = TREE_OPERAND (src, 0);
7848	  type = TREE_TYPE (TREE_TYPE (src));
7849	  MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7850
7851	  /* Copy word part most expediently.  */
7852	  emit_block_move (dest_mem, src_mem,
7853			   expand_expr (len, NULL_RTX, VOIDmode, 0),
7854			   MIN (src_align, dest_align));
7855	  return force_operand (dest_rtx, NULL_RTX);
7856	}
7857
7858/* These comparison functions need an instruction that returns an actual
7859   index.  An ordinary compare that just sets the condition codes
7860   is not enough.  */
7861#ifdef HAVE_cmpstrsi
7862    case BUILT_IN_STRCMP:
7863      /* If not optimizing, call the library function.  */
7864      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7865	break;
7866
7867      if (arglist == 0
7868	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7869	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7870	  || TREE_CHAIN (arglist) == 0
7871	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7872	break;
7873      else if (!HAVE_cmpstrsi)
7874	break;
7875      {
7876	tree arg1 = TREE_VALUE (arglist);
7877	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7878	tree offset;
7879	tree len, len2;
7880
7881	len = c_strlen (arg1);
7882	if (len)
7883	  len = size_binop (PLUS_EXPR, integer_one_node, len);
7884	len2 = c_strlen (arg2);
7885	if (len2)
7886	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7887
7888	/* If we don't have a constant length for the first, use the length
7889	   of the second, if we know it.  We don't require a constant for
7890	   this case; some cost analysis could be done if both are available
7891	   but neither is constant.  For now, assume they're equally cheap.
7892
7893	   If both strings have constant lengths, use the smaller.  This
7894	   could arise if optimization results in strcpy being called with
7895	   two fixed strings, or if the code was machine-generated.  We should
7896	   add some code to the `memcmp' handler below to deal with such
7897	   situations, someday.  */
7898	if (!len || TREE_CODE (len) != INTEGER_CST)
7899	  {
7900	    if (len2)
7901	      len = len2;
7902	    else if (len == 0)
7903	      break;
7904	  }
7905	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7906	  {
7907	    if (tree_int_cst_lt (len2, len))
7908	      len = len2;
7909	  }
7910
7911	chainon (arglist, build_tree_list (NULL_TREE, len));
7912      }
7913
7914      /* Falls through.  */
7915    case BUILT_IN_MEMCMP:
7916      /* If not optimizing, call the library function.  */
7917      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7918	break;
7919
7920      if (arglist == 0
7921	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
7922	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7923	  || TREE_CHAIN (arglist) == 0
7924	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7925	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7926	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7927	break;
7928      else if (!HAVE_cmpstrsi)
7929	break;
7930      {
7931	tree arg1 = TREE_VALUE (arglist);
7932	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7933	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7934	rtx result;
7935
7936	int arg1_align
7937	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7938	int arg2_align
7939	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7940	enum machine_mode insn_mode
7941	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7942
7943	/* If either argument is not a pointer type, call the function instead.  */
7944	if (arg1_align == 0 || arg2_align == 0)
7945	  {
7946	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7947	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7948	    break;
7949	  }
7950
7951	/* Make a place to write the result of the instruction.  */
7952	result = target;
7953	if (! (result != 0
7954	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7955	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7956	  result = gen_reg_rtx (insn_mode);
7957
7958	emit_insn (gen_cmpstrsi (result,
7959				 gen_rtx (MEM, BLKmode,
7960					  expand_expr (arg1, NULL_RTX,
7961						       ptr_mode,
7962						       EXPAND_NORMAL)),
7963				 gen_rtx (MEM, BLKmode,
7964					  expand_expr (arg2, NULL_RTX,
7965						       ptr_mode,
7966						       EXPAND_NORMAL)),
7967				 expand_expr (len, NULL_RTX, VOIDmode, 0),
7968				 GEN_INT (MIN (arg1_align, arg2_align))));
7969
7970	/* Return the value in the proper mode for this function.  */
7971	mode = TYPE_MODE (TREE_TYPE (exp));
7972	if (GET_MODE (result) == mode)
7973	  return result;
7974	else if (target != 0)
7975	  {
7976	    convert_move (target, result, 0);
7977	    return target;
7978	  }
7979	else
7980	  return convert_to_mode (mode, result, 0);
7981      }
7982#else
7983    case BUILT_IN_STRCMP:
7984    case BUILT_IN_MEMCMP:
7985      break;
7986#endif
7987
7988    default:			/* just do library call, if unknown builtin */
7989      error ("built-in function `%s' not currently supported",
7990	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7991    }
7992
7993  /* The switch statement above can drop through to cause the function
7994     to be called normally.  */
7995
7996  return expand_call (exp, target, ignore);
7997}
7998
7999/* Built-in functions to perform an untyped call and return.  */
8000
8001/* For each register that may be used for calling a function, this
8002   gives a mode used to copy the register's value.  VOIDmode indicates
8003   the register is not used for calling a function.  If the machine
8004   has register windows, this gives only the outbound registers.
8005   INCOMING_REGNO gives the corresponding inbound register.  */
8006static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8007
8008/* For each register that may be used for returning values, this gives
8009   a mode used to copy the register's value.  VOIDmode indicates the
8010   register is not used for returning values.  If the machine has
8011   register windows, this gives only the outbound registers.
8012   INCOMING_REGNO gives the corresponding inbound register.  */
8013static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8014
8015/* For each register that may be used for calling a function, this
8016   gives the offset of that register into the block returned by
8017   __builtin_apply_args.  0 indicates that the register is not
8018   used for calling a function. */
8019static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8020
8021/* Return the offset of register REGNO into the block returned by
8022   __builtin_apply_args.  This is not declared static, since it is
8023   needed in objc-act.c. */
8024
8025int
8026apply_args_register_offset (regno)
8027     int regno;
8028{
8029  apply_args_size ();
8030
8031  /* Arguments are always put in outgoing registers (in the argument
8032     block) when that makes sense.  */
8033#ifdef OUTGOING_REGNO
8034  regno = OUTGOING_REGNO(regno);
8035#endif
8036  return apply_args_reg_offset[regno];
8037}
8038
8039/* Return the size required for the block returned by __builtin_apply_args,
8040   and initialize apply_args_mode.  */
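/* The block holds, in order: the incoming arg pointer; the structure
   value address when one is passed in a register; then the value of
   each argument register, each aligned to its mode.  On a 32-bit
   target with two word-sized argument registers and a register-passed
   structure value, for instance, that comes to 16 bytes.  */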
8041
8042static int
8043apply_args_size ()
8044{
8045  static int size = -1;
8046  int align, regno;
8047  enum machine_mode mode;
8048
8049  /* The values computed by this function never change.  */
8050  if (size < 0)
8051    {
8052      /* The first value is the incoming arg-pointer.  */
8053      size = GET_MODE_SIZE (Pmode);
8054
8055      /* The second value is the structure value address unless this is
8056	 passed as an "invisible" first argument.  */
8057      if (struct_value_rtx)
8058	size += GET_MODE_SIZE (Pmode);
8059
8060      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8061	if (FUNCTION_ARG_REGNO_P (regno))
8062	  {
8063	    /* Search for the proper mode for copying this register's
8064	       value.  I'm not sure this is right, but it works so far.  */
8065	    enum machine_mode best_mode = VOIDmode;
8066
8067	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8068		 mode != VOIDmode;
8069		 mode = GET_MODE_WIDER_MODE (mode))
8070	      if (HARD_REGNO_MODE_OK (regno, mode)
8071		  && HARD_REGNO_NREGS (regno, mode) == 1)
8072		best_mode = mode;
8073
8074	    if (best_mode == VOIDmode)
8075	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8076		   mode != VOIDmode;
8077		   mode = GET_MODE_WIDER_MODE (mode))
8078		if (HARD_REGNO_MODE_OK (regno, mode)
8079		    && (mov_optab->handlers[(int) mode].insn_code
8080			!= CODE_FOR_nothing))
8081		  best_mode = mode;
8082
8083	    mode = best_mode;
8084	    if (mode == VOIDmode)
8085	      abort ();
8086
8087	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8088	    if (size % align != 0)
8089	      size = CEIL (size, align) * align;
8090	    apply_args_reg_offset[regno] = size;
8091	    size += GET_MODE_SIZE (mode);
8092	    apply_args_mode[regno] = mode;
8093	  }
8094	else
8095	  {
8096	    apply_args_mode[regno] = VOIDmode;
8097	    apply_args_reg_offset[regno] = 0;
8098	  }
8099    }
8100  return size;
8101}
8102
8103/* Return the size required for the block returned by __builtin_apply,
8104   and initialize apply_result_mode.  */
8105
8106static int
8107apply_result_size ()
8108{
8109  static int size = -1;
8110  int align, regno;
8111  enum machine_mode mode;
8112
8113  /* The values computed by this function never change.  */
8114  if (size < 0)
8115    {
8116      size = 0;
8117
8118      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8119	if (FUNCTION_VALUE_REGNO_P (regno))
8120	  {
8121	    /* Search for the proper mode for copying this register's
8122	       value.  I'm not sure this is right, but it works so far.  */
8123	    enum machine_mode best_mode = VOIDmode;
8124
8125	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8126		 mode != TImode;
8127		 mode = GET_MODE_WIDER_MODE (mode))
8128	      if (HARD_REGNO_MODE_OK (regno, mode))
8129		best_mode = mode;
8130
8131	    if (best_mode == VOIDmode)
8132	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8133		   mode != VOIDmode;
8134		   mode = GET_MODE_WIDER_MODE (mode))
8135		if (HARD_REGNO_MODE_OK (regno, mode)
8136		    && (mov_optab->handlers[(int) mode].insn_code
8137			!= CODE_FOR_nothing))
8138		  best_mode = mode;
8139
8140	    mode = best_mode;
8141	    if (mode == VOIDmode)
8142	      abort ();
8143
8144	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8145	    if (size % align != 0)
8146	      size = CEIL (size, align) * align;
8147	    size += GET_MODE_SIZE (mode);
8148	    apply_result_mode[regno] = mode;
8149	  }
8150	else
8151	  apply_result_mode[regno] = VOIDmode;
8152
8153      /* Allow targets that use untyped_call and untyped_return to override
8154	 the size so that machine-specific information can be stored here.  */
8155#ifdef APPLY_RESULT_SIZE
8156      size = APPLY_RESULT_SIZE;
8157#endif
8158    }
8159  return size;
8160}
8161
8162#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8163/* Create a vector describing the result block RESULT.  If SAVEP is true,
8164   the result block is used to save the values; otherwise it is used to
8165   restore the values.  */
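
/* For instance, on a machine returning values in an integer register r0
   and a floating register f0 (hypothetical names), the vector built below
   would have the shape, for SAVEP nonzero,

	(parallel [(set (mem:SI ...) (reg:SI r0))
		   (set (mem:DF ...) (reg:DF f0))])

   with the direction of each SET reversed when SAVEP is zero.  */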
8166
8167static rtx
8168result_vector (savep, result)
8169     int savep;
8170     rtx result;
8171{
8172  int regno, size, align, nelts;
8173  enum machine_mode mode;
8174  rtx reg, mem;
8175  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8176
8177  size = nelts = 0;
8178  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8179    if ((mode = apply_result_mode[regno]) != VOIDmode)
8180      {
8181	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8182	if (size % align != 0)
8183	  size = CEIL (size, align) * align;
8184	reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8185	mem = change_address (result, mode,
8186			      plus_constant (XEXP (result, 0), size));
8187	savevec[nelts++] = (savep
8188			    ? gen_rtx (SET, VOIDmode, mem, reg)
8189			    : gen_rtx (SET, VOIDmode, reg, mem));
8190	size += GET_MODE_SIZE (mode);
8191      }
8192  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8193}
8194#endif /* HAVE_untyped_call or HAVE_untyped_return */
8195
8196/* Save the state required to perform an untyped call with the same
8197   arguments as were passed to the current function.  */
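
/* At the source level this underlies __builtin_apply_args.  A minimal
   sketch of a use (hypothetical user code, not part of this file):

	void *args = __builtin_apply_args ();

   ARGS then addresses the block laid out above: the arg pointer, the
   structure value address if any, and the incoming argument registers.  */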
8198
8199static rtx
8200expand_builtin_apply_args ()
8201{
8202  rtx registers;
8203  int size, align, regno;
8204  enum machine_mode mode;
8205
8206  /* Create a block where the arg-pointer, structure value address,
8207     and argument registers can be saved.  */
8208  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8209
8210  /* Walk past the arg-pointer and structure value address.  */
8211  size = GET_MODE_SIZE (Pmode);
8212  if (struct_value_rtx)
8213    size += GET_MODE_SIZE (Pmode);
8214
8215  /* Save each register used in calling a function to the block.  */
8216  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8217    if ((mode = apply_args_mode[regno]) != VOIDmode)
8218      {
8219	rtx tem;
8220
8221	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8222	if (size % align != 0)
8223	  size = CEIL (size, align) * align;
8224
8225	tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8226
8227#ifdef STACK_REGS
8228        /* For reg-stack.c's stack register housekeeping.
8229	   Compare with a similar piece of code in function.c.  */
8230
8231        emit_insn (gen_rtx (USE, mode, tem));
8232#endif
8233
8234	emit_move_insn (change_address (registers, mode,
8235					plus_constant (XEXP (registers, 0),
8236						       size)),
8237			tem);
8238	size += GET_MODE_SIZE (mode);
8239      }
8240
8241  /* Save the arg pointer to the block.  */
8242  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8243		  copy_to_reg (virtual_incoming_args_rtx));
8244  size = GET_MODE_SIZE (Pmode);
8245
8246  /* Save the structure value address unless this is passed as an
8247     "invisible" first argument.  */
8248  if (struct_value_incoming_rtx)
8249    {
8250      emit_move_insn (change_address (registers, Pmode,
8251				      plus_constant (XEXP (registers, 0),
8252						     size)),
8253		      copy_to_reg (struct_value_incoming_rtx));
8254      size += GET_MODE_SIZE (Pmode);
8255    }
8256
8257  /* Return the address of the block.  */
8258  return copy_addr_to_reg (XEXP (registers, 0));
8259}
8260
8261/* Perform an untyped call and save the state required to perform an
8262   untyped return of whatever value was returned by the given function.  */
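
/* Together with expand_builtin_apply_args and expand_builtin_return this
   implements __builtin_apply.  A minimal forwarding sketch (hypothetical
   user code; WORK and the byte count 64 are assumptions, cf. the ???
   comment below about ARGSIZE):

	void
	wrapper ()
	{
	  void *args = __builtin_apply_args ();
	  __builtin_return (__builtin_apply ((void (*) ()) work, args, 64));
	}  */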
8263
8264static rtx
8265expand_builtin_apply (function, arguments, argsize)
8266     rtx function, arguments, argsize;
8267{
8268  int size, align, regno;
8269  enum machine_mode mode;
8270  rtx incoming_args, result, reg, dest, call_insn;
8271  rtx old_stack_level = 0;
8272  rtx call_fusage = 0;
8273
8274  /* Create a block where the return registers can be saved.  */
8275  result = assign_stack_local (BLKmode, apply_result_size (), -1);
8276
8277  /* ??? The argsize value should be adjusted here.  */
8278
8279  /* Fetch the arg pointer from the ARGUMENTS block.  */
8280  incoming_args = gen_reg_rtx (Pmode);
8281  emit_move_insn (incoming_args,
8282		  gen_rtx (MEM, Pmode, arguments));
8283#ifndef STACK_GROWS_DOWNWARD
8284  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8285				incoming_args, 0, OPTAB_LIB_WIDEN);
8286#endif
8287
8288  /* Perform postincrements before actually calling the function.  */
8289  emit_queue ();
8290
8291  /* Push a new argument block and copy the arguments.  */
8292  do_pending_stack_adjust ();
8293  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8294
8295  /* Push a block of memory onto the stack to store the memory arguments.
8296     Save the address in a register, and copy the memory arguments.  ??? I
8297     haven't figured out how the calling convention macros affect this,
8298     but it's likely that the source and/or destination addresses in
8299     the block copy will need updating in machine-specific ways.  */
8300  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8301  emit_block_move (gen_rtx (MEM, BLKmode, dest),
8302		   gen_rtx (MEM, BLKmode, incoming_args),
8303		   argsize,
8304		   PARM_BOUNDARY / BITS_PER_UNIT);
8305
8306  /* Refer to the argument block, ensuring apply_args_mode is set up.  */
8307  apply_args_size ();
8308  arguments = gen_rtx (MEM, BLKmode, arguments);
8309
8310  /* Walk past the arg-pointer and structure value address.  */
8311  size = GET_MODE_SIZE (Pmode);
8312  if (struct_value_rtx)
8313    size += GET_MODE_SIZE (Pmode);
8314
8315  /* Restore each of the registers previously saved.  Make USE insns
8316     for each of these registers for use in making the call.  */
8317  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8318    if ((mode = apply_args_mode[regno]) != VOIDmode)
8319      {
8320	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8321	if (size % align != 0)
8322	  size = CEIL (size, align) * align;
8323	reg = gen_rtx (REG, mode, regno);
8324	emit_move_insn (reg,
8325			change_address (arguments, mode,
8326					plus_constant (XEXP (arguments, 0),
8327						       size)));
8328
8329	use_reg (&call_fusage, reg);
8330	size += GET_MODE_SIZE (mode);
8331      }
8332
8333  /* Restore the structure value address unless this is passed as an
8334     "invisible" first argument.  */
8335  size = GET_MODE_SIZE (Pmode);
8336  if (struct_value_rtx)
8337    {
8338      rtx value = gen_reg_rtx (Pmode);
8339      emit_move_insn (value,
8340		      change_address (arguments, Pmode,
8341				      plus_constant (XEXP (arguments, 0),
8342						     size)));
8343      emit_move_insn (struct_value_rtx, value);
8344      if (GET_CODE (struct_value_rtx) == REG)
8345	use_reg (&call_fusage, struct_value_rtx);
8346      size += GET_MODE_SIZE (Pmode);
8347    }
8348
8349  /* All arguments and registers used for the call are set up by now!  */
8350  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8351
8352  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so it
8353     needs no work, and we don't load it into a register as an optimization
8354     because prepare_call_address has already done that when appropriate.  */
8355  if (GET_CODE (function) != SYMBOL_REF)
8356    function = memory_address (FUNCTION_MODE, function);
8357
8358  /* Generate the actual call instruction and save the return value.  */
8359#ifdef HAVE_untyped_call
8360  if (HAVE_untyped_call)
8361    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8362				      result, result_vector (1, result)));
8363  else
8364#endif
8365#ifdef HAVE_call_value
8366  if (HAVE_call_value)
8367    {
8368      rtx valreg = 0;
8369
8370      /* Locate the unique return register.  It is not possible to
8371	 express a call that sets more than one return register using
8372	 call_value; use untyped_call for that.  In fact, untyped_call
8373	 only needs to save the return registers in the given block.  */
8374      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8375	if ((mode = apply_result_mode[regno]) != VOIDmode)
8376	  {
8377	    if (valreg)
8378	      abort (); /* HAVE_untyped_call required.  */
8379	    valreg = gen_rtx (REG, mode, regno);
8380	  }
8381
8382      emit_call_insn (gen_call_value (valreg,
8383				      gen_rtx (MEM, FUNCTION_MODE, function),
8384				      const0_rtx, NULL_RTX, const0_rtx));
8385
8386      emit_move_insn (change_address (result, GET_MODE (valreg),
8387				      XEXP (result, 0)),
8388		      valreg);
8389    }
8390  else
8391#endif
8392    abort ();
8393
8394  /* Find the CALL insn we just emitted.  */
8395  for (call_insn = get_last_insn ();
8396       call_insn && GET_CODE (call_insn) != CALL_INSN;
8397       call_insn = PREV_INSN (call_insn))
8398    ;
8399
8400  if (! call_insn)
8401    abort ();
8402
8403  /* Put the register usage information on the CALL.  If there is already
8404     some usage information, put ours at the end.  */
8405  if (CALL_INSN_FUNCTION_USAGE (call_insn))
8406    {
8407      rtx link;
8408
8409      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8410	   link = XEXP (link, 1))
8411	;
8412
8413      XEXP (link, 1) = call_fusage;
8414    }
8415  else
8416    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8417
8418  /* Restore the stack.  */
8419  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8420
8421  /* Return the address of the result block.  */
8422  return copy_addr_to_reg (XEXP (result, 0));
8423}
8424
8425/* Perform an untyped return.  */
8426
8427static void
8428expand_builtin_return (result)
8429     rtx result;
8430{
8431  int size, align, regno;
8432  enum machine_mode mode;
8433  rtx reg;
8434  rtx call_fusage = 0;
8435
8436  apply_result_size ();
8437  result = gen_rtx (MEM, BLKmode, result);
8438
8439#ifdef HAVE_untyped_return
8440  if (HAVE_untyped_return)
8441    {
8442      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8443      emit_barrier ();
8444      return;
8445    }
8446#endif
8447
8448  /* Restore the return value and note that each value is used.  */
8449  size = 0;
8450  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8451    if ((mode = apply_result_mode[regno]) != VOIDmode)
8452      {
8453	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8454	if (size % align != 0)
8455	  size = CEIL (size, align) * align;
8456	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8457	emit_move_insn (reg,
8458			change_address (result, mode,
8459					plus_constant (XEXP (result, 0),
8460						       size)));
8461
8462	push_to_sequence (call_fusage);
8463	emit_insn (gen_rtx (USE, VOIDmode, reg));
8464	call_fusage = get_insns ();
8465	end_sequence ();
8466	size += GET_MODE_SIZE (mode);
8467      }
8468
8469  /* Put the USE insns before the return.  */
8470  emit_insns (call_fusage);
8471
8472  /* Return whatever values were restored by jumping directly to the end
8473     of the function.  */
8474  expand_null_return ();
8475}
8476
8477/* Expand code for a post- or pre- increment or decrement
8478   and return the RTX for the result.
8479   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */
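
/* For example, for `b = a++;' this is entered with POST == 1 and must
   yield A's old value for the store into B, while arranging (possibly via
   the queue) for A itself to be incremented; for `b = ++a;' (POST == 0)
   the value returned is the incremented one.  */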
8480
8481static rtx
8482expand_increment (exp, post)
8483     register tree exp;
8484     int post;
8485{
8486  register rtx op0, op1;
8487  register rtx temp, value;
8488  register tree incremented = TREE_OPERAND (exp, 0);
8489  optab this_optab = add_optab;
8490  int icode;
8491  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8492  int op0_is_copy = 0;
8493  int single_insn = 0;
8494  /* 1 means we can't store into OP0 directly,
8495     because it is a subreg narrower than a word,
8496     and we don't dare clobber the rest of the word.  */
8497  int bad_subreg = 0;
8498
8499  if (output_bytecode)
8500    {
8501      bc_expand_expr (exp);
8502      return NULL_RTX;
8503    }
8504
8505  /* Stabilize any component ref that might need to be
8506     evaluated more than once below.  */
8507  if (!post
8508      || TREE_CODE (incremented) == BIT_FIELD_REF
8509      || (TREE_CODE (incremented) == COMPONENT_REF
8510	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8511	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8512    incremented = stabilize_reference (incremented);
8513  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
8514     ones into save exprs so that they don't accidentally get evaluated
8515     more than once by the code below.  */
8516  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8517      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8518    incremented = save_expr (incremented);
8519
8520  /* Compute the operands as RTX.
8521     Note whether OP0 is the actual lvalue or a copy of it:
8522     I believe it is a copy iff it is a register or subreg
8523     and insns were generated in computing it.   */
8524
8525  temp = get_last_insn ();
8526  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8527
8528  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8529     in place but instead must do sign- or zero-extension during assignment,
8530     so we copy it into a new register and let the code below use it as
8531     a copy.
8532
8533     Note that we can safely modify this SUBREG since it is known not to be
8534     shared (it was made by the expand_expr call above).  */
8535
8536  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8537    {
8538      if (post)
8539	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8540      else
8541	bad_subreg = 1;
8542    }
8543  else if (GET_CODE (op0) == SUBREG
8544	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8545    {
8546      /* We cannot increment this SUBREG in place.  If we are
8547	 post-incrementing, get a copy of the old value.  Otherwise,
8548	 just mark that we cannot increment in place.  */
8549      if (post)
8550	op0 = copy_to_reg (op0);
8551      else
8552	bad_subreg = 1;
8553    }
8554
8555  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8556		 && temp != get_last_insn ());
8557  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8558
8559  /* Decide whether incrementing or decrementing.  */
8560  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8561      || TREE_CODE (exp) == PREDECREMENT_EXPR)
8562    this_optab = sub_optab;
8563
8564  /* Convert decrement by a constant into a negative increment.  */
8565  if (this_optab == sub_optab
8566      && GET_CODE (op1) == CONST_INT)
8567    {
8568      op1 = GEN_INT (- INTVAL (op1));
8569      this_optab = add_optab;
8570    }
8571
8572  /* For a preincrement, see if we can do this with a single instruction.  */
8573  if (!post)
8574    {
8575      icode = (int) this_optab->handlers[(int) mode].insn_code;
8576      if (icode != (int) CODE_FOR_nothing
8577	  /* Make sure that OP0 is valid for operands 0 and 1
8578	     of the insn we want to queue.  */
8579	  && (*insn_operand_predicate[icode][0]) (op0, mode)
8580	  && (*insn_operand_predicate[icode][1]) (op0, mode)
8581	  && (*insn_operand_predicate[icode][2]) (op1, mode))
8582	single_insn = 1;
8583    }
8584
8585  /* If OP0 is not the actual lvalue, but rather a copy in a register,
8586     then we cannot just increment OP0.  We must therefore contrive to
8587     increment the original value.  Then, for postincrement, we can return
8588     OP0 since it is a copy of the old value.  For preincrement, expand here
8589     unless we can do it with a single insn.
8590
8591     Likewise if storing directly into OP0 would clobber high bits
8592     we need to preserve (bad_subreg).  */
8593  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8594    {
8595      /* This is the easiest way to increment the value wherever it is.
8596	 Problems with multiple evaluation of INCREMENTED are prevented
8597	 because either (1) it is a component_ref or preincrement,
8598	 in which case it was stabilized above, or (2) it is an array_ref
8599	 with constant index in an array in a register, which is
8600	 safe to reevaluate.  */
8601      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8602			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
8603			    ? MINUS_EXPR : PLUS_EXPR),
8604			   TREE_TYPE (exp),
8605			   incremented,
8606			   TREE_OPERAND (exp, 1));
8607
8608      while (TREE_CODE (incremented) == NOP_EXPR
8609	     || TREE_CODE (incremented) == CONVERT_EXPR)
8610	{
8611	  newexp = convert (TREE_TYPE (incremented), newexp);
8612	  incremented = TREE_OPERAND (incremented, 0);
8613	}
8614
8615      temp = expand_assignment (incremented, newexp, ! post, 0);
8616      return post ? op0 : temp;
8617    }
8618
8619  if (post)
8620    {
8621      /* We have a true reference to the value in OP0.
8622	 If there is an insn to add or subtract in this mode, queue it.
8623	 Queueing the increment insn avoids the register shuffling
8624	 that often results if we must increment now and first save
8625	 the old value for subsequent use.  */
8626
8627#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
8628      op0 = stabilize (op0);
8629#endif
8630
8631      icode = (int) this_optab->handlers[(int) mode].insn_code;
8632      if (icode != (int) CODE_FOR_nothing
8633	  /* Make sure that OP0 is valid for operands 0 and 1
8634	     of the insn we want to queue.  */
8635	  && (*insn_operand_predicate[icode][0]) (op0, mode)
8636	  && (*insn_operand_predicate[icode][1]) (op0, mode))
8637	{
8638	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8639	    op1 = force_reg (mode, op1);
8640
8641	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8642	}
8643    }
8644
8645  /* Preincrement, or we can't increment with one simple insn.  */
8646  if (post)
8647    /* Save a copy of the value before inc or dec, to return it later.  */
8648    temp = value = copy_to_reg (op0);
8649  else
8650    /* Arrange to return the incremented value.  */
8651    /* Copy the rtx because expand_binop will protect from the queue,
8652       and the results of that would be invalid for us to return
8653       if our caller does emit_queue before using our result.  */
8654    temp = copy_rtx (value = op0);
8655
8656  /* Increment however we can.  */
8657  op1 = expand_binop (mode, this_optab, value, op1, op0,
8658		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8659  /* Make sure the value is stored into OP0.  */
8660  if (op1 != op0)
8661    emit_move_insn (op0, op1);
8662
8663  return temp;
8664}
8665
8666/* Expand all function calls contained within EXP, innermost ones first.
8667   But don't look within expressions that have sequence points.
8668   For each CALL_EXPR, record the rtx for its value
8669   in the CALL_EXPR_RTL field.  */
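
/* For example, in `x = f (y) + g (z);' both calls are expanded here before
   the addition itself is, so that the stack traffic of one call cannot
   land in the middle of computing the other operand.  Operands guarded by
   sequence points, as in `a ? f () : g ()', are deliberately left alone;
   see the COND_EXPR case below.  */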
8670
8671static void
8672preexpand_calls (exp)
8673     tree exp;
8674{
8675  register int nops, i;
8676  int type = TREE_CODE_CLASS (TREE_CODE (exp));
8677
8678  if (! do_preexpand_calls)
8679    return;
8680
8681  /* Only expressions and references can contain calls.  */
8682
8683  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8684    return;
8685
8686  switch (TREE_CODE (exp))
8687    {
8688    case CALL_EXPR:
8689      /* Do nothing if already expanded.  */
8690      if (CALL_EXPR_RTL (exp) != 0)
8691	return;
8692
8693      /* Do nothing to built-in functions.  */
8694      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8695	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8696	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8697	  /* Do nothing if the call returns a variable-sized object.  */
8698	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8699	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8700      return;
8701
8702    case COMPOUND_EXPR:
8703    case COND_EXPR:
8704    case TRUTH_ANDIF_EXPR:
8705    case TRUTH_ORIF_EXPR:
8706      /* If we find one of these, then we can be sure
8707	 the adjust will be done for it (since it makes jumps).
8708	 Do it now, so that if this is inside an argument
8709	 of a function, we don't get the stack adjustment
8710	 after some other args have already been pushed.  */
8711      do_pending_stack_adjust ();
8712      return;
8713
8714    case BLOCK:
8715    case RTL_EXPR:
8716    case WITH_CLEANUP_EXPR:
8717    case CLEANUP_POINT_EXPR:
8718      return;
8719
8720    case SAVE_EXPR:
8721      if (SAVE_EXPR_RTL (exp) != 0)
8722	return;
8723    }
8724
8725  nops = tree_code_length[(int) TREE_CODE (exp)];
8726  for (i = 0; i < nops; i++)
8727    if (TREE_OPERAND (exp, i) != 0)
8728      {
8729	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8730	if (type == 'e' || type == '<' || type == '1' || type == '2'
8731	    || type == 'r')
8732	  preexpand_calls (TREE_OPERAND (exp, i));
8733      }
8734}
8735
8736/* At the start of a function, record that we have no previously-pushed
8737   arguments waiting to be popped.  */
8738
8739void
8740init_pending_stack_adjust ()
8741{
8742  pending_stack_adjust = 0;
8743}
8744
8745/* When exiting from a function, if safe, clear out any pending stack adjust
8746   so the adjustment won't get done.  */
8747
8748void
8749clear_pending_stack_adjust ()
8750{
8751#ifdef EXIT_IGNORE_STACK
8752  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8753      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8754      && ! flag_inline_functions)
8755    pending_stack_adjust = 0;
8756#endif
8757}
8758
8759/* Pop any previously-pushed arguments that have not been popped yet.  */
8760
8761void
8762do_pending_stack_adjust ()
8763{
8764  if (inhibit_defer_pop == 0)
8765    {
8766      if (pending_stack_adjust != 0)
8767	adjust_stack (GEN_INT (pending_stack_adjust));
8768      pending_stack_adjust = 0;
8769    }
8770}
8771
8772/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8773   Returns the cleanups to be performed.  */
8774
8775static tree
8776defer_cleanups_to (old_cleanups)
8777     tree old_cleanups;
8778{
8779  tree new_cleanups = NULL_TREE;
8780  tree cleanups = cleanups_this_call;
8781  tree last = NULL_TREE;
8782
8783  while (cleanups_this_call != old_cleanups)
8784    {
8785      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8786      last = cleanups_this_call;
8787      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8788    }
8789
8790  if (last)
8791    {
8792      /* Remove the list from the chain of cleanups.  */
8793      TREE_CHAIN (last) = NULL_TREE;
8794
8795      /* Reverse them so that we can build them in the right order.  */
8796      cleanups = nreverse (cleanups);
8797
8798      while (cleanups)
8799	{
8800	  if (new_cleanups)
8801	    new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8802				  TREE_VALUE (cleanups), new_cleanups);
8803	  else
8804	    new_cleanups = TREE_VALUE (cleanups);
8805
8806	  cleanups = TREE_CHAIN (cleanups);
8807	}
8808    }
8809
8810  return new_cleanups;
8811}
8812
8813/* Expand all cleanups up to OLD_CLEANUPS.
8814   Needed here, and also for language-dependent calls.  */
8815
8816void
8817expand_cleanups_to (old_cleanups)
8818     tree old_cleanups;
8819{
8820  while (cleanups_this_call != old_cleanups)
8821    {
8822      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8823      expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8824      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8825    }
8826}
8827
8828/* Expand conditional expressions.  */
8829
8830/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8831   LABEL is an rtx of code CODE_LABEL, in this function and all the
8832   functions here.  */
8833
8834void
8835jumpifnot (exp, label)
8836     tree exp;
8837     rtx label;
8838{
8839  do_jump (exp, label, NULL_RTX);
8840}
8841
8842/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
8843
8844void
8845jumpif (exp, label)
8846     tree exp;
8847     rtx label;
8848{
8849  do_jump (exp, NULL_RTX, label);
8850}
8851
8852/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8853   the result is zero, or IF_TRUE_LABEL if the result is one.
8854   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8855   meaning fall through in that case.
8856
8857   do_jump always does any pending stack adjust except when it does not
8858   actually perform a jump.  An example where there is no jump
8859   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8860
8861   This function is responsible for optimizing cases such as
8862   &&, || and comparison operators in EXP.  */
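
/* For example, for the condition of `if (a && b)' the TRUTH_ANDIF_EXPR
   case below behaves essentially as

	do_jump (a, if_false_label, NULL_RTX);
	do_jump (b, if_false_label, if_true_label);

   so that B is never evaluated once A is known to be zero.  */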
8863
8864void
8865do_jump (exp, if_false_label, if_true_label)
8866     tree exp;
8867     rtx if_false_label, if_true_label;
8868{
8869  register enum tree_code code = TREE_CODE (exp);
8870  /* Some cases need to create a label to jump to
8871     in order to properly fall through.
8872     These cases set DROP_THROUGH_LABEL nonzero.  */
8873  rtx drop_through_label = 0;
8874  rtx temp;
8875  rtx comparison = 0;
8876  int i;
8877  tree type;
8878  enum machine_mode mode;
8879
8880  emit_queue ();
8881
8882  switch (code)
8883    {
8884    case ERROR_MARK:
8885      break;
8886
8887    case INTEGER_CST:
8888      temp = integer_zerop (exp) ? if_false_label : if_true_label;
8889      if (temp)
8890	emit_jump (temp);
8891      break;
8892
8893#if 0
8894      /* This is not true with #pragma weak  */
8895    case ADDR_EXPR:
8896      /* The address of something can never be zero.  */
8897      if (if_true_label)
8898	emit_jump (if_true_label);
8899      break;
8900#endif
8901
8902    case NOP_EXPR:
8903      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8904	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8905	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8906	goto normal;
8907    case CONVERT_EXPR:
8908      /* If we are narrowing the operand, we have to do the compare in the
8909	 narrower mode.  */
8910      if ((TYPE_PRECISION (TREE_TYPE (exp))
8911	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8912	goto normal;
8913    case NON_LVALUE_EXPR:
8914    case REFERENCE_EXPR:
8915    case ABS_EXPR:
8916    case NEGATE_EXPR:
8917    case LROTATE_EXPR:
8918    case RROTATE_EXPR:
8919      /* These cannot change zero->non-zero or vice versa.  */
8920      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8921      break;
8922
8923#if 0
8924      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8925	 a test and can be longer if the test is eliminated.  */
8926    case PLUS_EXPR:
8927      /* Reduce to minus.  */
8928      exp = build (MINUS_EXPR, TREE_TYPE (exp),
8929		   TREE_OPERAND (exp, 0),
8930		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8931				 TREE_OPERAND (exp, 1))));
8932      /* Process as MINUS.  */
8933#endif
8934
8935    case MINUS_EXPR:
8936      /* Non-zero iff operands of minus differ.  */
8937      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8938				   TREE_OPERAND (exp, 0),
8939				   TREE_OPERAND (exp, 1)),
8940			    NE, NE);
8941      break;
8942
8943    case BIT_AND_EXPR:
8944      /* If we are AND'ing with a small constant, do this comparison in the
8945	 smallest type that fits.  If the machine doesn't have comparisons
8946	 that small, it will be converted back to the wider comparison.
8947	 This helps if we are testing the sign bit of a narrower object.
8948	 combine can't do this for us because it can't know whether a
8949	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
8950
8951      if (! SLOW_BYTE_ACCESS
8952	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8953	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8954	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8955	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8956	  && (type = type_for_mode (mode, 1)) != 0
8957	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8958	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8959	      != CODE_FOR_nothing))
8960	{
8961	  do_jump (convert (type, exp), if_false_label, if_true_label);
8962	  break;
8963	}
8964      goto normal;
8965
8966    case TRUTH_NOT_EXPR:
8967      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8968      break;
8969
8970    case TRUTH_ANDIF_EXPR:
8971      {
8972	rtx seq1, seq2;
8973	tree cleanups, old_cleanups;
8974
8975	if (if_false_label == 0)
8976	  if_false_label = drop_through_label = gen_label_rtx ();
8977	start_sequence ();
8978	do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8979	seq1 = get_insns ();
8980	end_sequence ();
8981
8982	old_cleanups = cleanups_this_call;
8983	start_sequence ();
8984	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8985	seq2 = get_insns ();
8986	end_sequence ();
8987
8988	cleanups = defer_cleanups_to (old_cleanups);
8989	if (cleanups)
8990	  {
8991	    rtx flag = gen_reg_rtx (word_mode);
8992	    tree new_cleanups;
8993	    tree cond;
8994
8995	    /* Flag cleanups as not needed. */
8996	    emit_move_insn (flag, const0_rtx);
8997	    emit_insns (seq1);
8998
8999	    /* Flag cleanups as needed. */
9000	    emit_move_insn (flag, const1_rtx);
9001	    emit_insns (seq2);
9002
9003	    /* Convert the flag, which is an rtx, into a tree.  */
9004	    cond = make_node (RTL_EXPR);
9005	    TREE_TYPE (cond) = integer_type_node;
9006	    RTL_EXPR_RTL (cond) = flag;
9007	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9008	    cond = save_expr (cond);
9009
9010	    new_cleanups = build (COND_EXPR, void_type_node,
9011				  truthvalue_conversion (cond),
9012				  cleanups, integer_zero_node);
9013	    new_cleanups = fold (new_cleanups);
9014
9015	    /* Now add in the conditionalized cleanups. */
9016	    cleanups_this_call
9017	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9018	    (*interim_eh_hook) (NULL_TREE);
9019	  }
9020	else
9021	  {
9022	    emit_insns (seq1);
9023	    emit_insns (seq2);
9024	  }
9025      }
9026      break;
9027
9028    case TRUTH_ORIF_EXPR:
9029      {
9030	rtx seq1, seq2;
9031	tree cleanups, old_cleanups;
9032
9033	if (if_true_label == 0)
9034	  if_true_label = drop_through_label = gen_label_rtx ();
9035	start_sequence ();
9036	do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9037	seq1 = get_insns ();
9038	end_sequence ();
9039
9040	old_cleanups = cleanups_this_call;
9041	start_sequence ();
9042	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9043	seq2 = get_insns ();
9044	end_sequence ();
9045
9046	cleanups = defer_cleanups_to (old_cleanups);
9047	if (cleanups)
9048	  {
9049	    rtx flag = gen_reg_rtx (word_mode);
9050	    tree new_cleanups;
9051	    tree cond;
9052
9053	    /* Flag cleanups as not needed. */
9054	    emit_move_insn (flag, const0_rtx);
9055	    emit_insns (seq1);
9056
9057	    /* Flag cleanups as needed. */
9058	    emit_move_insn (flag, const1_rtx);
9059	    emit_insns (seq2);
9060
9061	    /* Convert the flag, which is an rtx, into a tree.  */
9062	    cond = make_node (RTL_EXPR);
9063	    TREE_TYPE (cond) = integer_type_node;
9064	    RTL_EXPR_RTL (cond) = flag;
9065	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9066	    cond = save_expr (cond);
9067
9068	    new_cleanups = build (COND_EXPR, void_type_node,
9069				  truthvalue_conversion (cond),
9070				  cleanups, integer_zero_node);
9071	    new_cleanups = fold (new_cleanups);
9072
9073	    /* Now add in the conditionalized cleanups. */
9074	    cleanups_this_call
9075	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9076	    (*interim_eh_hook) (NULL_TREE);
9077	  }
9078	else
9079	  {
9080	    emit_insns (seq1);
9081	    emit_insns (seq2);
9082	  }
9083      }
9084      break;
9085
9086    case COMPOUND_EXPR:
9087      push_temp_slots ();
9088      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9089      free_temp_slots ();
9090      pop_temp_slots ();
9091      emit_queue ();
9092      do_pending_stack_adjust ();
9093      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9094      break;
9095
9096    case COMPONENT_REF:
9097    case BIT_FIELD_REF:
9098    case ARRAY_REF:
9099      {
9100	int bitsize, bitpos, unsignedp;
9101	enum machine_mode mode;
9102	tree type;
9103	tree offset;
9104	int volatilep = 0;
9105
9106	/* Get description of this reference.  We don't actually care
9107	   about the underlying object here.  */
9108	get_inner_reference (exp, &bitsize, &bitpos, &offset,
9109			     &mode, &unsignedp, &volatilep);
9110
9111	type = type_for_size (bitsize, unsignedp);
9112	if (! SLOW_BYTE_ACCESS
9113	    && type != 0 && bitsize >= 0
9114	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9115	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9116		!= CODE_FOR_nothing))
9117	  {
9118	    do_jump (convert (type, exp), if_false_label, if_true_label);
9119	    break;
9120	  }
9121	goto normal;
9122      }
9123
9124    case COND_EXPR:
9125      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9126      if (integer_onep (TREE_OPERAND (exp, 1))
9127	  && integer_zerop (TREE_OPERAND (exp, 2)))
9128	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9129
9130      else if (integer_zerop (TREE_OPERAND (exp, 1))
9131	       && integer_onep (TREE_OPERAND (exp, 2)))
9132	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9133
9134      else
9135	{
9136	  register rtx label1 = gen_label_rtx ();
9137	  drop_through_label = gen_label_rtx ();
9138	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9139	  /* Now the THEN-expression.  */
9140	  do_jump (TREE_OPERAND (exp, 1),
9141		   if_false_label ? if_false_label : drop_through_label,
9142		   if_true_label ? if_true_label : drop_through_label);
9143	  /* In case the do_jump just above never jumps.  */
9144	  do_pending_stack_adjust ();
9145	  emit_label (label1);
9146	  /* Now the ELSE-expression.  */
9147	  do_jump (TREE_OPERAND (exp, 2),
9148		   if_false_label ? if_false_label : drop_through_label,
9149		   if_true_label ? if_true_label : drop_through_label);
9150	}
9151      break;
9152
9153    case EQ_EXPR:
9154      {
9155	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9156
9157	if (integer_zerop (TREE_OPERAND (exp, 1)))
9158	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9159	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9160		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9161	  do_jump
9162	    (fold
9163	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9164		     fold (build (EQ_EXPR, TREE_TYPE (exp),
9165				  fold (build1 (REALPART_EXPR,
9166						TREE_TYPE (inner_type),
9167						TREE_OPERAND (exp, 0))),
9168				  fold (build1 (REALPART_EXPR,
9169						TREE_TYPE (inner_type),
9170						TREE_OPERAND (exp, 1))))),
9171		     fold (build (EQ_EXPR, TREE_TYPE (exp),
9172				  fold (build1 (IMAGPART_EXPR,
9173						TREE_TYPE (inner_type),
9174						TREE_OPERAND (exp, 0))),
9175				  fold (build1 (IMAGPART_EXPR,
9176						TREE_TYPE (inner_type),
9177						TREE_OPERAND (exp, 1))))))),
9178	     if_false_label, if_true_label);
9179	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9180		 && !can_compare_p (TYPE_MODE (inner_type)))
9181	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9182	else
9183	  comparison = compare (exp, EQ, EQ);
9184	break;
9185      }
9186
9187    case NE_EXPR:
9188      {
9189	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9190
9191	if (integer_zerop (TREE_OPERAND (exp, 1)))
9192	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9193	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9194		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9195	  do_jump
9196	    (fold
9197	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9198		     fold (build (NE_EXPR, TREE_TYPE (exp),
9199				  fold (build1 (REALPART_EXPR,
9200						TREE_TYPE (inner_type),
9201						TREE_OPERAND (exp, 0))),
9202				  fold (build1 (REALPART_EXPR,
9203						TREE_TYPE (inner_type),
9204						TREE_OPERAND (exp, 1))))),
9205		     fold (build (NE_EXPR, TREE_TYPE (exp),
9206				  fold (build1 (IMAGPART_EXPR,
9207						TREE_TYPE (inner_type),
9208						TREE_OPERAND (exp, 0))),
9209				  fold (build1 (IMAGPART_EXPR,
9210						TREE_TYPE (inner_type),
9211						TREE_OPERAND (exp, 1))))))),
9212	     if_false_label, if_true_label);
9213	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9214		 && !can_compare_p (TYPE_MODE (inner_type)))
9215	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9216	else
9217	  comparison = compare (exp, NE, NE);
9218	break;
9219      }
9220
9221    case LT_EXPR:
9222      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9223	   == MODE_INT)
9224	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9225	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9226      else
9227	comparison = compare (exp, LT, LTU);
9228      break;
9229
9230    case LE_EXPR:
9231      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9232	   == MODE_INT)
9233	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9234	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9235      else
9236	comparison = compare (exp, LE, LEU);
9237      break;
9238
9239    case GT_EXPR:
9240      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9241	   == MODE_INT)
9242	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9243	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9244      else
9245	comparison = compare (exp, GT, GTU);
9246      break;
9247
9248    case GE_EXPR:
9249      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9250	   == MODE_INT)
9251	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9252	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9253      else
9254	comparison = compare (exp, GE, GEU);
9255      break;
9256
9257    default:
9258    normal:
9259      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9260#if 0
9261      /* This is not needed any more and causes poor code since it causes
9262	 comparisons and tests from non-SI objects to have different code
9263	 sequences.  */
9264      /* Copy to register to avoid generating bad insns by cse
9265	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
9266      if (!cse_not_expected && GET_CODE (temp) == MEM)
9267	temp = copy_to_reg (temp);
9268#endif
9269      do_pending_stack_adjust ();
9270      if (GET_CODE (temp) == CONST_INT)
9271	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9272      else if (GET_CODE (temp) == LABEL_REF)
9273	comparison = const_true_rtx;
9274      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9275	       && !can_compare_p (GET_MODE (temp)))
9276	/* Note swapping the labels gives us not-equal.  */
9277	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9278      else if (GET_MODE (temp) != VOIDmode)
9279	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9280				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9281				       GET_MODE (temp), NULL_RTX, 0);
9282      else
9283	abort ();
9284    }
9285
9286  /* Do any postincrements in the expression that was tested.  */
9287  emit_queue ();
9288
9289  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9290     straight into a conditional jump instruction as the jump condition.
9291     Otherwise, all the work has been done already.  */
9292
9293  if (comparison == const_true_rtx)
9294    {
9295      if (if_true_label)
9296	emit_jump (if_true_label);
9297    }
9298  else if (comparison == const0_rtx)
9299    {
9300      if (if_false_label)
9301	emit_jump (if_false_label);
9302    }
9303  else if (comparison)
9304    do_jump_for_compare (comparison, if_false_label, if_true_label);
9305
9306  if (drop_through_label)
9307    {
9308      /* If do_jump produces code that might be jumped around,
9309	 do any stack adjusts from that code, before the place
9310	 where control merges in.  */
9311      do_pending_stack_adjust ();
9312      emit_label (drop_through_label);
9313    }
9314}
9315
9316/* Given a comparison expression EXP for values too wide to be compared
9317   with one insn, test the comparison and jump to the appropriate label.
9318   The code of EXP is ignored; we always test GT if SWAP is 0,
9319   and LT if SWAP is 1.  */
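
/* For example, a DImode GT comparison on a 32-bit target compares the
   high-order words first: jump to IF_TRUE_LABEL if OP0's word is greater,
   to IF_FALSE_LABEL if the words otherwise differ, and only if they are
   equal go on to the low-order words, which are always compared
   unsigned.  */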
9320
9321static void
9322do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9323     tree exp;
9324     int swap;
9325     rtx if_false_label, if_true_label;
9326{
9327  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9328  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9329  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9330  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9331  rtx drop_through_label = 0;
9332  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9333  int i;
9334
9335  if (! if_true_label || ! if_false_label)
9336    drop_through_label = gen_label_rtx ();
9337  if (! if_true_label)
9338    if_true_label = drop_through_label;
9339  if (! if_false_label)
9340    if_false_label = drop_through_label;
9341
9342  /* Compare a word at a time, high order first.  */
9343  for (i = 0; i < nwords; i++)
9344    {
9345      rtx comp;
9346      rtx op0_word, op1_word;
9347
9348      if (WORDS_BIG_ENDIAN)
9349	{
9350	  op0_word = operand_subword_force (op0, i, mode);
9351	  op1_word = operand_subword_force (op1, i, mode);
9352	}
9353      else
9354	{
9355	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9356	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9357	}
9358
9359      /* All but high-order word must be compared as unsigned.  */
9360      comp = compare_from_rtx (op0_word, op1_word,
9361			       (unsignedp || i > 0) ? GTU : GT,
9362			       unsignedp, word_mode, NULL_RTX, 0);
9363      if (comp == const_true_rtx)
9364	emit_jump (if_true_label);
9365      else if (comp != const0_rtx)
9366	do_jump_for_compare (comp, NULL_RTX, if_true_label);
9367
9368      /* Consider lower words only if these are equal.  */
9369      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9370			       NULL_RTX, 0);
9371      if (comp == const_true_rtx)
9372	emit_jump (if_false_label);
9373      else if (comp != const0_rtx)
9374	do_jump_for_compare (comp, NULL_RTX, if_false_label);
9375    }
9376
9377  if (if_false_label)
9378    emit_jump (if_false_label);
9379  if (drop_through_label)
9380    emit_label (drop_through_label);
9381}
9382
9383/* Compare OP0 with OP1, word at a time, in mode MODE.
9384   UNSIGNEDP says to do unsigned comparison.
9385   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
9386
9387void
9388do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9389     enum machine_mode mode;
9390     int unsignedp;
9391     rtx op0, op1;
9392     rtx if_false_label, if_true_label;
9393{
9394  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9395  rtx drop_through_label = 0;
9396  int i;
9397
9398  if (! if_true_label || ! if_false_label)
9399    drop_through_label = gen_label_rtx ();
9400  if (! if_true_label)
9401    if_true_label = drop_through_label;
9402  if (! if_false_label)
9403    if_false_label = drop_through_label;
9404
9405  /* Compare a word at a time, high order first.  */
9406  for (i = 0; i < nwords; i++)
9407    {
9408      rtx comp;
9409      rtx op0_word, op1_word;
9410
9411      if (WORDS_BIG_ENDIAN)
9412	{
9413	  op0_word = operand_subword_force (op0, i, mode);
9414	  op1_word = operand_subword_force (op1, i, mode);
9415	}
9416      else
9417	{
9418	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9419	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9420	}
9421
9422      /* All but high-order word must be compared as unsigned.  */
9423      comp = compare_from_rtx (op0_word, op1_word,
9424			       (unsignedp || i > 0) ? GTU : GT,
9425			       unsignedp, word_mode, NULL_RTX, 0);
9426      if (comp == const_true_rtx)
9427	emit_jump (if_true_label);
9428      else if (comp != const0_rtx)
9429	do_jump_for_compare (comp, NULL_RTX, if_true_label);
9430
9431      /* Consider lower words only if these are equal.  */
9432      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9433			       NULL_RTX, 0);
9434      if (comp == const_true_rtx)
9435	emit_jump (if_false_label);
9436      else if (comp != const0_rtx)
9437	do_jump_for_compare (comp, NULL_RTX, if_false_label);
9438    }
9439
9440  if (if_false_label)
9441    emit_jump (if_false_label);
9442  if (drop_through_label)
9443    emit_label (drop_through_label);
9444}
9445
9446/* Given an EQ_EXPR expression EXP for values too wide to be compared
9447   with one insn, test the comparison and jump to the appropriate label.  */
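
/* For example, a DImode `x == y' on a 32-bit target jumps to
   IF_FALSE_LABEL as soon as some pair of corresponding words differs;
   only if every pair matches does control reach the final jump to
   IF_TRUE_LABEL.  */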
9448
9449static void
9450do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9451     tree exp;
9452     rtx if_false_label, if_true_label;
9453{
9454  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9455  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9456  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9457  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9458  int i;
9459  rtx drop_through_label = 0;
9460
9461  if (! if_false_label)
9462    drop_through_label = if_false_label = gen_label_rtx ();
9463
9464  for (i = 0; i < nwords; i++)
9465    {
9466      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9467				   operand_subword_force (op1, i, mode),
9468				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9469				   word_mode, NULL_RTX, 0);
9470      if (comp == const0_rtx)
9471	emit_jump (if_false_label);
9472      else if (comp != const_true_rtx)
9473	do_jump_for_compare (comp, if_false_label, NULL_RTX);
9474    }
9475
9476  if (if_true_label)
9477    emit_jump (if_true_label);
9478  if (drop_through_label)
9479    emit_label (drop_through_label);
9480}
9481
9482/* Jump according to whether OP0 is 0.
9483   We assume that OP0 has an integer mode that is too wide
9484   for the available compare insns.  */
9485
9486static void
9487do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9488     rtx op0;
9489     rtx if_false_label, if_true_label;
9490{
9491  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9492  int i;
9493  rtx drop_through_label = 0;
9494
9495  if (! if_false_label)
9496    drop_through_label = if_false_label = gen_label_rtx ();
9497
9498  for (i = 0; i < nwords; i++)
9499    {
9500      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9501							  GET_MODE (op0)),
9502				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9503      if (comp == const0_rtx)
9504	emit_jump (if_false_label);
9505      else if (comp != const_true_rtx)
9506	do_jump_for_compare (comp, if_false_label, NULL_RTX);
9507    }
9508
9509  if (if_true_label)
9510    emit_jump (if_true_label);
9511  if (drop_through_label)
9512    emit_label (drop_through_label);
9513}
9514
9515/* Given a comparison expression in rtl form, output conditional branches to
9516   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
9517
9518static void
9519do_jump_for_compare (comparison, if_false_label, if_true_label)
9520     rtx comparison, if_false_label, if_true_label;
9521{
9522  if (if_true_label)
9523    {
9524      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9525	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9526      else
9527	abort ();
9528
9529      if (if_false_label)
9530	emit_jump (if_false_label);
9531    }
9532  else if (if_false_label)
9533    {
9534      rtx insn;
9535      rtx prev = get_last_insn ();
9536      rtx branch = 0;
9537
9538      /* Output the branch with the opposite condition.  Then try to invert
9539	 what is generated.  If more than one insn is a branch, or if the
9540	 branch is not the last insn written, abort.  If we can't invert
9541	 the branch, make a true label, redirect this jump to that,
9542	 emit a jump to the false label and define the true label.  */
9543
9544      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9545	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9546      else
9547	abort ();
9548
9549      /* Here we get the first insn that was just emitted.  It used to be the
9550	 case that, on some machines, emitting the branch would discard
9551	 the previous compare insn and emit a replacement.  This isn't
9552	 done anymore, but abort if we see that PREV is deleted.  */
9553
9554      if (prev == 0)
9555	insn = get_insns ();
9556      else if (INSN_DELETED_P (prev))
9557	abort ();
9558      else
9559	insn = NEXT_INSN (prev);
9560
9561      for (; insn; insn = NEXT_INSN (insn))
9562	if (GET_CODE (insn) == JUMP_INSN)
9563	  {
9564	    if (branch)
9565	      abort ();
9566	    branch = insn;
9567	  }
9568
9569      if (branch != get_last_insn ())
9570	abort ();
9571
9572      JUMP_LABEL (branch) = if_false_label;
9573      if (! invert_jump (branch, if_false_label))
9574	{
9575	  if_true_label = gen_label_rtx ();
9576	  redirect_jump (branch, if_true_label);
9577	  emit_jump (if_false_label);
9578	  emit_label (if_true_label);
9579	}
9580    }
9581}
9582
9583/* Generate code for a comparison expression EXP
9584   (including code to compute the values to be compared)
9585   and set (CC0) according to the result.
9586   SIGNED_CODE should be the rtx operation for this comparison for
9587   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9588
9589   We force a stack adjustment unless there are currently
9590   things pushed on the stack that aren't yet used.  */
9591
9592static rtx
9593compare (exp, signed_code, unsigned_code)
9594     register tree exp;
9595     enum rtx_code signed_code, unsigned_code;
9596{
9597  register rtx op0
9598    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9599  register rtx op1
9600    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9601  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9602  register enum machine_mode mode = TYPE_MODE (type);
9603  int unsignedp = TREE_UNSIGNED (type);
9604  enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9605
9606  return compare_from_rtx (op0, op1, code, unsignedp, mode,
9607			   ((mode == BLKmode)
9608			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9609			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9610}
9611
9612/* Like compare but expects the values to compare as two rtx's.
9613   The decision as to signed or unsigned comparison must be made by the caller.
9614
9615   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9616   compared.
9617
9618   If ALIGN is non-zero, it is the alignment of this type; if zero, the
9619   size of MODE should be used.  */
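
/* A typical use is a sketch like

	comparison = compare_from_rtx (x, y, LT, 0, SImode, NULL_RTX, 0);

   with X and Y SImode rtx's: the compare insn is emitted and
   (lt (cc0) (const_int 0)) is returned for do_jump_for_compare, except
   that two CONST_INT operands may fold straight to const_true_rtx or
   const0_rtx.  */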
9620
9621rtx
9622compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9623     register rtx op0, op1;
9624     enum rtx_code code;
9625     int unsignedp;
9626     enum machine_mode mode;
9627     rtx size;
9628     int align;
9629{
9630  rtx tem;
9631
9632  /* If one operand is constant, make it the second one.  Only do this
9633     if the other operand is not constant as well.  */
9634
9635  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9636      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9637    {
9638      tem = op0;
9639      op0 = op1;
9640      op1 = tem;
9641      code = swap_condition (code);
9642    }
9643
9644  if (flag_force_mem)
9645    {
9646      op0 = force_not_mem (op0);
9647      op1 = force_not_mem (op1);
9648    }
9649
9650  do_pending_stack_adjust ();
9651
9652  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9653      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9654    return tem;
9655
9656#if 0
9657  /* There's no need to do this now that combine.c can eliminate lots of
9658     sign extensions.  This can be less efficient in certain cases on other
9659     machines. */
9660
9661  /* If this is a signed equality comparison, we can do it as an
9662     unsigned comparison since zero-extension is cheaper than sign
9663     extension and comparisons with zero are done as unsigned.  This is
9664     the case even on machines that can do fast sign extension, since
9665     zero-extension is easier to combine with other operations than
9666     sign-extension is.  If we are comparing against a constant, we must
9667     convert it to what it would look like unsigned.  */
9668  if ((code == EQ || code == NE) && ! unsignedp
9669      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9670    {
9671      if (GET_CODE (op1) == CONST_INT
9672	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9673	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9674      unsignedp = 1;
9675    }
9676#endif
9677
9678  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9679
9680  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9681}
9682
9683/* Generate code to calculate EXP using a store-flag instruction
9684   and return an rtx for the result.  EXP is either a comparison
9685   or a TRUTH_NOT_EXPR whose operand is a comparison.
9686
9687   If TARGET is nonzero, store the result there if convenient.
9688
9689   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9690   cheap.
9691
9692   Return zero if there is no suitable set-flag instruction
9693   available on this machine.
9694
9695   Once expand_expr has been called on the arguments of the comparison,
9696   we are committed to doing the store flag, since it is not safe to
9697   re-evaluate the expression.  We emit the store-flag insn by calling
9698   emit_store_flag, but only expand the arguments if we have a reason
9699   to believe that emit_store_flag will be successful.  If we think that
9700   it will, but it isn't, we have to simulate the store-flag with a
9701   set/jump/set sequence.  */
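
/* For example, `flag = (x < y);' can often be done with one scc insn that
   stores the comparison result directly; the set/jump/set fallback
   mentioned above amounts roughly to

	target = 1;
	if (x < y) goto label;
	target = 0;
     label:

   expressed in insns rather than source.  */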
9702
9703static rtx
9704do_store_flag (exp, target, mode, only_cheap)
9705     tree exp;
9706     rtx target;
9707     enum machine_mode mode;
9708     int only_cheap;
9709{
9710  enum rtx_code code;
9711  tree arg0, arg1, type;
9712  tree tem;
9713  enum machine_mode operand_mode;
9714  int invert = 0;
9715  int unsignedp;
9716  rtx op0, op1;
9717  enum insn_code icode;
9718  rtx subtarget = target;
9719  rtx result, label, pattern, jump_pat;
9720
9721  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9722     result at the end.  We can't simply invert the test since it would
9723     have already been inverted if it were valid.  This case occurs for
9724     some floating-point comparisons.  */
9725
9726  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9727    invert = 1, exp = TREE_OPERAND (exp, 0);
9728
9729  arg0 = TREE_OPERAND (exp, 0);
9730  arg1 = TREE_OPERAND (exp, 1);
9731  type = TREE_TYPE (arg0);
9732  operand_mode = TYPE_MODE (type);
9733  unsignedp = TREE_UNSIGNED (type);
9734
9735  /* We won't bother with BLKmode store-flag operations because it would mean
9736     passing a lot of information to emit_store_flag.  */
9737  if (operand_mode == BLKmode)
9738    return 0;
9739
9740  STRIP_NOPS (arg0);
9741  STRIP_NOPS (arg1);
9742
9743  /* Get the rtx comparison code to use.  We know that EXP is a comparison
9744     operation of some type.  Some comparisons against 1 and -1 can be
9745     converted to comparisons with zero.  Do so here so that the tests
9746     below will be aware that we have a comparison with zero.   These
9747     tests will not catch constants in the first operand, but constants
9748     are rarely passed as the first operand.  */
9749
9750  switch (TREE_CODE (exp))
9751    {
9752    case EQ_EXPR:
9753      code = EQ;
9754      break;
9755    case NE_EXPR:
9756      code = NE;
9757      break;
9758    case LT_EXPR:
9759      if (integer_onep (arg1))
9760	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9761      else
9762	code = unsignedp ? LTU : LT;
9763      break;
9764    case LE_EXPR:
9765      if (! unsignedp && integer_all_onesp (arg1))
9766	arg1 = integer_zero_node, code = LT;
9767      else
9768	code = unsignedp ? LEU : LE;
9769      break;
9770    case GT_EXPR:
9771      if (! unsignedp && integer_all_onesp (arg1))
9772	arg1 = integer_zero_node, code = GE;
9773      else
9774	code = unsignedp ? GTU : GT;
9775      break;
9776    case GE_EXPR:
9777      if (integer_onep (arg1))
9778	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9779      else
9780	code = unsignedp ? GEU : GE;
9781      break;
9782    default:
9783      abort ();
9784    }
9785
9786  /* Put a constant second.  */
9787  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9788    {
9789      tem = arg0; arg0 = arg1; arg1 = tem;
9790      code = swap_condition (code);
9791    }
9792
9793  /* If this is an equality or inequality test of a single bit, we can
9794     do this by shifting the bit being tested to the low-order bit and
9795     masking the result with the constant 1.  If the condition was EQ,
9796     we xor it with 1.  This does not require an scc insn and is faster
9797     than an scc insn even if we have it.  */
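
  /* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and
     `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1'; when bit 3 is the
     high bit of the operand's mode, the final AND is omitted.  */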
9798
9799  if ((code == NE || code == EQ)
9800      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9801      && integer_pow2p (TREE_OPERAND (arg0, 1))
9802      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9803    {
9804      tree inner = TREE_OPERAND (arg0, 0);
9805      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9806						    NULL_RTX, VOIDmode, 0)));
9807      int ops_unsignedp;
9808
9809      /* If INNER is a right shift of a constant and it plus BITNUM does
9810	 not overflow, adjust BITNUM and INNER.  */
9811
9812      if (TREE_CODE (inner) == RSHIFT_EXPR
9813	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9814	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9815	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9816	      < TYPE_PRECISION (type)))
9817	{
9818	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9819	  inner = TREE_OPERAND (inner, 0);
9820	}
9821
9822      /* If we are going to be able to omit the AND below, we must do our
9823	 operations as unsigned.  If we must use the AND, we have a choice.
9824	 Normally unsigned is faster, but for some machines signed is.  */
9825      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9826#ifdef LOAD_EXTEND_OP
9827		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9828#else
9829		       : 1
9830#endif
9831		       );
9832
9833      if (subtarget == 0 || GET_CODE (subtarget) != REG
9834	  || GET_MODE (subtarget) != operand_mode
9835	  || ! safe_from_p (subtarget, inner))
9836	subtarget = 0;
9837
9838      op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9839
9840      if (bitnum != 0)
9841	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9842			    size_int (bitnum), subtarget, ops_unsignedp);
9843
9844      if (GET_MODE (op0) != mode)
9845	op0 = convert_to_mode (mode, op0, ops_unsignedp);
9846
9847      if ((code == EQ && ! invert) || (code == NE && invert))
9848	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9849			    ops_unsignedp, OPTAB_LIB_WIDEN);
9850
9851      /* Put the AND last so it can combine with more things.  */
9852      if (bitnum != TYPE_PRECISION (type) - 1)
9853	op0 = expand_and (op0, const1_rtx, subtarget);
9854
9855      return op0;
9856    }
9857
9858  /* Now see if we are likely to be able to do this.  Return if not.  */
9859  if (! can_compare_p (operand_mode))
9860    return 0;
9861  icode = setcc_gen_code[(int) code];
9862  if (icode == CODE_FOR_nothing
9863      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9864    {
9865      /* We can only do this if it is one of the special cases that
9866	 can be handled without an scc insn.  */
9867      if ((code == LT && integer_zerop (arg1))
9868	  || (! only_cheap && code == GE && integer_zerop (arg1)))
9869	;
9870      else if (BRANCH_COST >= 0
9871	       && ! only_cheap && (code == NE || code == EQ)
9872	       && TREE_CODE (type) != REAL_TYPE
9873	       && ((abs_optab->handlers[(int) operand_mode].insn_code
9874		    != CODE_FOR_nothing)
9875		   || (ffs_optab->handlers[(int) operand_mode].insn_code
9876		       != CODE_FOR_nothing)))
9877	;
9878      else
9879	return 0;
9880    }
9881
9882  preexpand_calls (exp);
9883  if (subtarget == 0 || GET_CODE (subtarget) != REG
9884      || GET_MODE (subtarget) != operand_mode
9885      || ! safe_from_p (subtarget, arg1))
9886    subtarget = 0;
9887
9888  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9889  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9890
9891  if (target == 0)
9892    target = gen_reg_rtx (mode);
9893
9894  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
9895     because, if emit_store_flag does anything at all, it will succeed, and
9896     OP0 and OP1 will not be used subsequently.  */
9897
9898  result = emit_store_flag (target, code,
9899			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9900			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9901			    operand_mode, unsignedp, 1);
9902
9903  if (result)
9904    {
9905      if (invert)
9906	result = expand_binop (mode, xor_optab, result, const1_rtx,
9907			       result, 0, OPTAB_LIB_WIDEN);
9908      return result;
9909    }
9910
9911  /* If this failed, we have to do this with set/compare/jump/set code.  */
9912  if (target == 0 || GET_CODE (target) != REG
9913      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9914    target = gen_reg_rtx (GET_MODE (target));
9915
9916  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9917  result = compare_from_rtx (op0, op1, code, unsignedp,
9918			     operand_mode, NULL_RTX, 0);
9919  if (GET_CODE (result) == CONST_INT)
9920    return (((result == const0_rtx && ! invert)
9921	     || (result != const0_rtx && invert))
9922	    ? const0_rtx : const1_rtx);
9923
9924  label = gen_label_rtx ();
9925  if (bcc_gen_fctn[(int) code] == 0)
9926    abort ();
9927
9928  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9929  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9930  emit_label (label);
9931
9932  return target;
9933}
9934
9935/* Generate a tablejump instruction (used for switch statements).  */
9936
9937#ifdef HAVE_tablejump
9938
9939/* INDEX is the value being switched on, with the lowest value
9940   in the table already subtracted.
9941   MODE is its expected mode (needed if INDEX is constant).
9942   RANGE is the length of the jump table.
9943   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9944
9945   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9946   index value is out of range.  */
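
/* For example, for `switch (x)' whose smallest case is 3 and largest is
   7, the caller passes INDEX = x - 3 and RANGE = 4; the single unsigned
   comparison INDEX > RANGE below then catches every out-of-range x,
   since x < 3 wraps around to a large unsigned value.  */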
9947
9948void
9949do_tablejump (index, mode, range, table_label, default_label)
9950     rtx index, range, table_label, default_label;
9951     enum machine_mode mode;
9952{
9953  register rtx temp, vector;
9954
9955  /* Do an unsigned comparison (in the proper mode) between the index
9956     expression and the value which represents the length of the range.
9957     Since we just finished subtracting the lower bound of the range
9958     from the index expression, this comparison allows us to simultaneously
9959     check that the original index expression value is both greater than
9960     or equal to the minimum value of the range and less than or equal to
9961     the maximum value of the range.  */
9962
9963  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9964  emit_jump_insn (gen_bgtu (default_label));
9965
9966  /* If index is in range, it must fit in Pmode.
9967     Convert to Pmode so we can index with it.  */
9968  if (mode != Pmode)
9969    index = convert_to_mode (Pmode, index, 1);
9970
9971  /* Don't let a MEM slip through, because then the INDEX that comes
9972     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9973     and break_out_memory_refs will go to work on it and mess it up.  */
9974#ifdef PIC_CASE_VECTOR_ADDRESS
9975  if (flag_pic && GET_CODE (index) != REG)
9976    index = copy_to_mode_reg (Pmode, index);
9977#endif
9978
9979  /* If flag_force_addr were to affect this address
9980     it could interfere with the tricky assumptions made
9981     about addresses that contain label-refs,
9982     which may be valid only very near the tablejump itself.  */
9983  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9984     GET_MODE_SIZE, because this indicates how large insns are.  The other
9985     uses should all be Pmode, because they are addresses.  This code
9986     could fail if addresses and insns are not the same size.  */
9987  index = gen_rtx (PLUS, Pmode,
9988		   gen_rtx (MULT, Pmode, index,
9989			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9990		   gen_rtx (LABEL_REF, Pmode, table_label));
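
  /* In the non-PIC case, INDEX now holds the address
     table_label + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE), e.g.
     table_label + INDEX * 4 when case-vector entries are 4 bytes.  */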
9991#ifdef PIC_CASE_VECTOR_ADDRESS
9992  if (flag_pic)
9993    index = PIC_CASE_VECTOR_ADDRESS (index);
9994  else
9995#endif
9996    index = memory_address_noforce (CASE_VECTOR_MODE, index);
9997  temp = gen_reg_rtx (CASE_VECTOR_MODE);
9998  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9999  RTX_UNCHANGING_P (vector) = 1;
10000  convert_move (temp, vector, 0);
10001
10002  emit_jump_insn (gen_tablejump (temp, table_label));
10003
10004#ifndef CASE_VECTOR_PC_RELATIVE
10005  /* If we are generating PIC code or if the table is PC-relative, the
10006     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10007  if (! flag_pic)
10008    emit_barrier ();
10009#endif
10010}
10011
10012#endif /* HAVE_tablejump */
10013
10014
10015/* Emit a suitable bytecode to load a value from memory, assuming a pointer
10016   to that value is on the top of the stack. The resulting type is TYPE, and
10017   the source declaration is DECL. */
10018
10019void
10020bc_load_memory (type, decl)
10021     tree type, decl;
10022{
10023  enum bytecode_opcode opcode;
10024
10025
10026  /* Bit fields are special.  We only know about signed and
10027     unsigned ints, and enums.  Enums are loaded like integers,
10028     according to their own signedness.  */
10029
10030  if (DECL_BIT_FIELD (decl))
10031    if (TREE_CODE (type) == ENUMERAL_TYPE
10032	|| TREE_CODE (type) == INTEGER_TYPE)
10033      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10034    else
10035      abort ();
10036  else
10037    /* See corresponding comment in bc_store_memory(). */
10038    if (TYPE_MODE (type) == BLKmode
10039	|| TYPE_MODE (type) == VOIDmode)
10040      return;
10041    else
10042      opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10043
10044  if (opcode == neverneverland)
10045    abort ();
10046
10047  bc_emit_bytecode (opcode);
10048
10049#ifdef DEBUG_PRINT_CODE
10050  fputc ('\n', stderr);
10051#endif
10052}
10053
10054
10055/* Store the contents of the second stack slot to the address in the
10056   top stack slot.  TYPE is the value's type; DECL is the declaration of
10057   the destination, used to tell whether we're dealing with a bit field.  */
10058
10059void
10060bc_store_memory (type, decl)
10061     tree type, decl;
10062{
10063  enum bytecode_opcode opcode;
10064
10065
10066  if (DECL_BIT_FIELD (decl))
10067    {
10068      if (TREE_CODE (type) == ENUMERAL_TYPE
10069	  || TREE_CODE (type) == INTEGER_TYPE)
10070	opcode = sstoreBI;
10071      else
10072	abort ();
10073    }
10074  else
10075    if (TYPE_MODE (type) == BLKmode)
10076      {
10077	/* Copy structure.  This expands to a block copy instruction, storeBLK.
10078	   In addition to the arguments expected by the other store instructions,
10079	   it also expects a type size (SImode) on top of the stack, which is the
10080	   structure size in size units (usually bytes).  The first two arguments
10081	   are already on the stack; so we just put the size on level 1.  For some
10082	   other languages the size may be variable; this is why we don't encode
10083	   it as a storeBLK literal, but rather treat it as a full-fledged expression.  */
10084
10085	bc_expand_expr (TYPE_SIZE (type));
10086	opcode = storeBLK;
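
	/* At this point the stack holds, from deeper to top: the value
	   being stored, the destination address, and the SImode size just
	   pushed; storeBLK consumes all three.  */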
10087      }
10088    else
10089      opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10090
10091  if (opcode == neverneverland)
10092    abort ();
10093
10094  bc_emit_bytecode (opcode);
10095
10096#ifdef DEBUG_PRINT_CODE
10097  fputc ('\n', stderr);
10098#endif
10099}
10100
10101
10102/* Allocate local stack space sufficient to hold a value of the given
10103   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
10104   integral power of 2.  A special case is locals of type VOID, which
10105   have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10106   remapped into the corresponding attribute of SI.  */
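
/* For example, with BITS_PER_UNIT == 8, a local of SIZE 5 and ALIGNMENT
   32 gives a byte_alignment of 4: if local_vars_size is currently 6, it
   is first rounded up to 8, the local is assigned offset 8, and
   local_vars_size becomes 13.  */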
10107
10108rtx
10109bc_allocate_local (size, alignment)
10110     int size, alignment;
10111{
10112  rtx retval;
10113  int byte_alignment;
10114
10115  if (size < 0)
10116    abort ();
10117
10118  /* Normalize size and alignment.  */
10119  if (!size)
10120    size = UNITS_PER_WORD;
10121
10122  if (alignment < BITS_PER_UNIT)
10123    byte_alignment = 1 << (INT_ALIGN - 1);
10124  else
10125    /* Convert the alignment from bits to bytes.  */
10126    byte_alignment = alignment / BITS_PER_UNIT;
10127
10128  if (local_vars_size & (byte_alignment - 1))
10129    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10130
10131  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10132  local_vars_size += size;
10133
10134  return retval;
10135}
10136
10137
10138/* Allocate a variable-sized local array.  Variable-sized arrays are
10139   represented as pointers to the memory where their data is stored.  */
10140
10141rtx
10142bc_allocate_variable_array (size)
10143     tree size;
10144{
10145  rtx retval;
10146  const int ptralign = (1 << (PTR_ALIGN - 1));
10147
10148  /* Align pointer, masking with PTRALIGN - 1 as in bc_allocate_local.  */
10149  if (local_vars_size & (ptralign - 1))
10150    local_vars_size += ptralign - (local_vars_size & (ptralign - 1));
10151
10152  /* Note down local space needed: pointer to block; also return
10153     dummy rtx */
10154
10155  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10156  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10157  return retval;
10158}
10159
10160
10161/* Push the machine address for the given external variable offset.  */
10162void
10163bc_load_externaddr (externaddr)
10164     rtx externaddr;
10165{
10166  bc_emit_bytecode (constP);
10167  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10168			 BYTECODE_BC_LABEL (externaddr)->offset);
10169
10170#ifdef DEBUG_PRINT_CODE
10171  fputc ('\n', stderr);
10172#endif
10173}
10174
10175
/* Return a copy of string S in freshly xmalloc'd storage.  */

10176static char *
10177bc_strdup (s)
10178    char *s;
10179{
10180  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10181  strcpy (new, s);
10182  return new;
10183}
10184
10185
10186/* Like bc_load_externaddr above, but expects an IDENTIFIER.  */
10187void
10188bc_load_externaddr_id (id, offset)
10189     tree id;
10190     int offset;
10191{
10192  if (!IDENTIFIER_POINTER (id))
10193    abort ();
10194
10195  bc_emit_bytecode (constP);
10196  bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
10197
10198#ifdef DEBUG_PRINT_CODE
10199  fputc ('\n', stderr);
10200#endif
10201}
10202
10203
10204/* Push the machine address for the given local variable offset.  */
10205void
10206bc_load_localaddr (localaddr)
10207     rtx localaddr;
10208{
10209  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10210}
10211
10212
10213/* Push the machine address for the given parameter offset.
10214   NOTE: offset is in bits. */
10215void
10216bc_load_parmaddr (parmaddr)
10217     rtx parmaddr;
10218{
10219  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10220			      / BITS_PER_UNIT));
10221}
10222
10223
10224/* Convert a[i] into *(a + i).  */
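/* E.g. with `int a[10]', `a[i]' becomes, in effect,
   `*(int *) ((char *) &a + i * sizeof (int))'; the index is first
   converted to a pointer-sized type so the multiply cannot overflow
   spuriously.  */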
10225tree
10226bc_canonicalize_array_ref (exp)
10227     tree exp;
10228{
10229  tree type = TREE_TYPE (exp);
10230  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10231			   TREE_OPERAND (exp, 0));
10232  tree index = TREE_OPERAND (exp, 1);
10233
10234
10235  /* Convert the integer argument to a type the same size as a pointer
10236     so the multiply won't overflow spuriously.  */
10237
10238  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10239    index = convert (type_for_size (POINTER_SIZE, 0), index);
10240
10241  /* The array address isn't volatile even if the array is.
10242     (Of course this isn't terribly relevant since the bytecode
10243     translator treats nearly everything as volatile anyway.)  */
10244  TREE_THIS_VOLATILE (array_adr) = 0;
10245
10246  return build1 (INDIRECT_REF, type,
10247		 fold (build (PLUS_EXPR,
10248			      TYPE_POINTER_TO (type),
10249			      array_adr,
10250			      fold (build (MULT_EXPR,
10251					   TYPE_POINTER_TO (type),
10252					   index,
10253					   size_in_bytes (type))))));
10254}
10255
10256
10257/* Load the address of the component referenced by the given
10258   COMPONENT_REF expression.
10259
10260   Returns innermost lvalue. */
10261
10262tree
10263bc_expand_component_address (exp)
10264     tree exp;
10265{
10266  tree tem, chain;
10267  enum machine_mode mode;
10268  int bitpos = 0;
10269  HOST_WIDE_INT SIval;
10270
10271
10272  tem = TREE_OPERAND (exp, 1);
10273  mode = DECL_MODE (tem);
10274
10275
10276  /* Compute cumulative bit offset for nested component refs
10277     and array refs, and find the ultimate containing object.  */
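
  /* E.g. for `s.a.b', the loop adds the bit position of `b' within
     `a's type to that of `a' within `s', and leaves the outermost
     object `s' in TEM.  */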
10278
10279  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10280    {
10281      if (TREE_CODE (tem) == COMPONENT_REF)
10282	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10283      else
10284	if (TREE_CODE (tem) == ARRAY_REF
10285	    && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10286	    && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10287
10288	  bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10289		     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10290		     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10291	else
10292	  break;
10293    }
10294
10295  bc_expand_expr (tem);
10296
10297
10298  /* For bit fields also push their offset and size.  */
10299  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10300    bc_push_offset_and_size (bitpos,
			     TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
10301  else
10302    if ((SIval = bitpos / BITS_PER_UNIT))
10303      bc_emit_instruction (addconstPSI, SIval);
10304
10305  return (TREE_OPERAND (exp, 1));
10306}
10307
10308
10309/* Emit code to push two SI constants */
10310void
10311bc_push_offset_and_size (offset, size)
10312     HOST_WIDE_INT offset, size;
10313{
10314  bc_emit_instruction (constSI, offset);
10315  bc_emit_instruction (constSI, size);
10316}
10317
10318
10319/* Emit byte code to push the address of the given lvalue expression to
10320   the stack.  If it's a bit field, we also push offset and size info.
10321
10322   Returns innermost component, which allows us to determine not only
10323   its type, but also whether it's a bitfield. */
10324
10325tree
10326bc_expand_address (exp)
10327     tree exp;
10328{
10329  /* Safeguard */
10330  if (!exp || TREE_CODE (exp) == ERROR_MARK)
10331    return (exp);
10332
10333
10334  switch (TREE_CODE (exp))
10335    {
10336    case ARRAY_REF:
10337
10338      return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10339
10340    case COMPONENT_REF:
10341
10342      return (bc_expand_component_address (exp));
10343
10344    case INDIRECT_REF:
10345
10346      bc_expand_expr (TREE_OPERAND (exp, 0));
10347
10348      /* For variable-sized types: retrieve pointer.  Sometimes the
10349	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
10350	 also make sure we have an operand, just in case... */
10351
10352      if (TREE_OPERAND (exp, 0)
10353	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10354	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10355	bc_emit_instruction (loadP);
10356
10357      /* If packed, also return offset and size */
10358      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10359
10360	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10361				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10362
10363      return (TREE_OPERAND (exp, 0));
10364
10365    case FUNCTION_DECL:
10366
10367      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10368			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
10369      break;
10370
10371    case PARM_DECL:
10372
10373      bc_load_parmaddr (DECL_RTL (exp));
10374
10375      /* For variable-sized types: retrieve pointer */
10376      if (TYPE_SIZE (TREE_TYPE (exp))
10377	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10378	bc_emit_instruction (loadP);
10379
10380      /* If packed, also return offset and size */
10381      if (DECL_BIT_FIELD (exp))
10382	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10383				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10384
10385      break;
10386
10387    case RESULT_DECL:
10388
10389      bc_emit_instruction (returnP);
10390      break;
10391
10392    case VAR_DECL:
10393
10394#if 0
10395      if (BYTECODE_LABEL (DECL_RTL (exp)))
10396	bc_load_externaddr (DECL_RTL (exp));
10397#endif
10398
10399      if (DECL_EXTERNAL (exp))
10400	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10401			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10402      else
10403	bc_load_localaddr (DECL_RTL (exp));
10404
10405      /* For variable-sized types: retrieve pointer */
10406      if (TYPE_SIZE (TREE_TYPE (exp))
10407	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10408	bc_emit_instruction (loadP);
10409
10410      /* If packed, also return offset and size */
10411      if (DECL_BIT_FIELD (exp))
10412	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10413				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10414
10415      break;
10416
10417    case STRING_CST:
10418      {
10419	rtx r;
10420
10421	bc_emit_bytecode (constP);
10422	r = output_constant_def (exp);
10423	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10424
10425#ifdef DEBUG_PRINT_CODE
10426	fputc ('\n', stderr);
10427#endif
10428      }
10429      break;
10430
10431    default:
10432
10433      abort ();
10434      break;
10435    }
10436
10437  /* Most lvalues don't have components. */
10438  return (exp);
10439}
10440
10441
10442/* Return a type code to be used by the runtime support in handling
10443   parameter passing.  The type code consists of the machine mode
10444   OR'd with the minimal alignment (in bits) shifted left 8 bits.  */
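
/* For example, a 32-bit int parameter (SImode, 32-bit alignment) yields
   (int) SImode | (32 << 8): the mode sits in the low byte and the
   alignment in bits in the bytes above it.  */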
10445
10446tree
10447bc_runtime_type_code (type)
10448     tree type;
10449{
10450  int val;
10451
10452  switch (TREE_CODE (type))
10453    {
10454    case VOID_TYPE:
10455    case INTEGER_TYPE:
10456    case REAL_TYPE:
10457    case COMPLEX_TYPE:
10458    case ENUMERAL_TYPE:
10459    case POINTER_TYPE:
10460    case RECORD_TYPE:
10461
10462      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10463      break;
10464
10465    case ERROR_MARK:
10466
10467      val = 0;
10468      break;
10469
10470    default:
10471
10472      abort ();
10473    }
10474  return build_int_2 (val, 0);
10475}
10476
10477
10478/* Generate constructor label */
10479char *
10480bc_gen_constr_label ()
10481{
10482  static int label_counter;
10483  static char label[20];
10484
10485  sprintf (label, "*LR%d", label_counter++);
10486
10487  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10488}
10489
10490
10491/* Evaluate constructor CONSTR and return pointer to it on level one.  We
10492   expand the constructor data as static data, and push a pointer to it.
10493   The pointer is put in the pointer table and is retrieved by a constP
10494   bytecode instruction.  We then loop and store each constructor member in
10495   the corresponding component.  Finally, we return the original pointer on
10496   the stack. */
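
/* For a constant initializer such as `struct S { int a, b; } s = { 1, 2 };'
   the data is emitted once as static storage under a generated label, and
   only a constP pushing its pointer-table entry appears in the bytecode.
   A non-constant initializer instead emits a clearBLK (when elements are
   missing) followed by one store per element.  */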
10497
10498void
10499bc_expand_constructor (constr)
10500     tree constr;
10501{
10502  char *l;
10503  HOST_WIDE_INT ptroffs;
10504  rtx constr_rtx;
10505
10506
10507  /* Literal constructors are handled as constants, whereas
10508     non-literals are evaluated and stored element by element
10509     into the data segment. */
10510
10511  /* Allocate space in the proper segment and push a pointer to that
10512     space on the stack.  */
10513
10514  l = bc_gen_constr_label ();
10515
10516  if (TREE_CONSTANT (constr))
10517    {
10518      text_section ();
10519
10520      bc_emit_const_labeldef (l);
10521      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
10522    }
10523  else
10524    {
10525      data_section ();
10526
10527      bc_emit_data_labeldef (l);
10528      bc_output_data_constructor (constr);
10529    }
10530
10531
10532  /* Add reference to pointer table and recall pointer to stack;
10533     this code is common for both types of constructors: literals
10534     and non-literals. */
10535
10536  ptroffs = bc_define_pointer (l);
10537  bc_emit_instruction (constP, ptroffs);
10538
10539  /* This is all that has to be done if it's a literal. */
10540  if (TREE_CONSTANT (constr))
10541    return;
10542
10543
10544  /* At this point, we have the pointer to the structure on top of the stack.
10545     Generate sequences of store_memory calls for the constructor. */
10546
10547  /* constructor type is structure */
10548  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10549    {
10550      register tree elt;
10551
10552      /* If the constructor has fewer fields than the structure,
10553	 clear the whole structure first.  */
10554
10555      if (list_length (CONSTRUCTOR_ELTS (constr))
10556	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
10557	{
10558	  bc_emit_instruction (duplicate);
10559	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10560	  bc_emit_instruction (clearBLK);
10561	}
10562
10563      /* Store each element of the constructor into the corresponding
10564	 field of TARGET.  */
10565
10566      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
10567	{
10568	  register tree field = TREE_PURPOSE (elt);
10569	  register enum machine_mode mode;
10570	  int bitsize;
10571	  int bitpos;
10572	  int unsignedp;
10573
10574	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10575	  mode = DECL_MODE (field);
10576	  unsignedp = TREE_UNSIGNED (field);
10577
10578	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
10579
10580	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10581			  /* The alignment of TARGET is
10582			     at least what its type requires.  */
10583			  VOIDmode, 0,
10584			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10585			  int_size_in_bytes (TREE_TYPE (constr)));
10586	}
10587    }
10588  else
10589
10590    /* Constructor type is array */
10591    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
10592      {
10593	register tree elt;
10594	register int i;
10595	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10596	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10597	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10598	tree elttype = TREE_TYPE (TREE_TYPE (constr));
10599
10600	/* If the constructor has fewer elements than the array,
10601	   clear the whole array first.  */
10602
10603	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
10604	  {
10605	    bc_emit_instruction (duplicate);
10606	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10607	    bc_emit_instruction (clearBLK);
10608	  }
10609
10610
10611	/* Store each element of the constructor into the corresponding
10612	   element of TARGET, determined by counting the elements. */
10613
10614	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
10615	     elt;
10616	     elt = TREE_CHAIN (elt), i++)
10617	  {
10618	    register enum machine_mode mode;
10619	    int bitsize;
10620	    int bitpos;
10621	    int unsignedp;
10622
10623	    mode = TYPE_MODE (elttype);
10624	    bitsize = GET_MODE_BITSIZE (mode);
10625	    unsignedp = TREE_UNSIGNED (elttype);
10626
10627	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10628		      /* * TYPE_SIZE_UNIT (elttype) */ );
10629
10630	    bc_store_field (elt, bitsize, bitpos, mode,
10631			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10632			    /* The alignment of TARGET is
10633			       at least what its type requires.  */
10634			    VOIDmode, 0,
10635			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10636			    int_size_in_bytes (TREE_TYPE (constr)));
10637	  }
10638
10639      }
10640}
10641
10642
10643/* Store the value of EXP (an expression tree) into member FIELD of the
10644   structure whose address is on the stack.  The member has type TYPE and
10645   mode MODE, occupies BITSIZE bits, and starts BITPOS bits from the
10646   beginning of the structure.
10647
10648   ALIGN is the alignment the structure is known to have, in bytes.
10649   TOTAL_SIZE is its size in bytes, or -1 if variable.  */
10650
10651void
10652bc_store_field (field, bitsize, bitpos, mode, exp, type,
10653		value_mode, unsignedp, align, total_size)
10654     int bitsize, bitpos;
10655     enum machine_mode mode;
10656     tree field, exp, type;
10657     enum machine_mode value_mode;
10658     int unsignedp;
10659     int align;
10660     int total_size;
10661{
10662
10663  /* Expand expression and copy pointer */
10664  bc_expand_expr (exp);
10665  bc_emit_instruction (over);
10666
10667
10668  /* If the component is a bit field, we cannot use addressing to access
10669     it.  Use bit-field techniques to store in it.  */
10670
10671  if (DECL_BIT_FIELD (field))
10672    {
10673      bc_store_bit_field (bitpos, bitsize, unsignedp);
10674      return;
10675    }
10676  else
10677    /* Not bit field */
10678    {
10679      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10680
10681      /* Advance pointer to the desired member */
10682      if (offset)
10683	bc_emit_instruction (addconstPSI, offset);
10684
10685      /* Store */
10686      bc_store_memory (type, field);
10687    }
10688}
10689
10690
10691/* Store SI/SU in bitfield */
10692void
10693bc_store_bit_field (offset, size, unsignedp)
10694     int offset, size, unsignedp;
10695{
10696  /* Push bitfield offset and size */
10697  bc_push_offset_and_size (offset, size);
10698
10699  /* Store */
10700  bc_emit_instruction (sstoreBI);
10701}
10702
10703
10704/* Load SI/SU from bitfield */
10705void
10706bc_load_bit_field (offset, size, unsignedp)
10707     int offset, size, unsignedp;
10708{
10709  /* Push bitfield offset and size */
10710  bc_push_offset_and_size (offset, size);
10711
10712  /* Load: sign-extend if signed, else zero-extend */
10713  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10714}
10715
10716
10717/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
10718   (adjust stack pointer upwards), negative means add that number of
10719   levels (adjust the stack pointer downwards).  Only positive values
10720   normally make sense. */
10721
10722void
10723bc_adjust_stack (nlevels)
10724     int nlevels;
10725{
10726  switch (nlevels)
10727    {
10728    case 0:
10729      break;
10730
10731    case 2:
10732      bc_emit_instruction (drop);
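      /* Fall through: dropping two levels takes two drop instructions.  */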
10733
10734    case 1:
10735      bc_emit_instruction (drop);
10736      break;
10737
10738    default:
10739
10740      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10741      stack_depth -= nlevels;
10742    }
10743
10744#if defined (VALIDATE_STACK_FOR_BC)
10745  VALIDATE_STACK_FOR_BC ();
10746#endif
10747}
10748