explow.c revision 117395
1230557Sjimharris/* Subroutines for manipulating rtx's in semantically interesting ways.
2230557Sjimharris   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
3230557Sjimharris   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4230557Sjimharris
5230557SjimharrisThis file is part of GCC.
6230557Sjimharris
7230557SjimharrisGCC is free software; you can redistribute it and/or modify it under
8230557Sjimharristhe terms of the GNU General Public License as published by the Free
9230557SjimharrisSoftware Foundation; either version 2, or (at your option) any later
10230557Sjimharrisversion.
11230557Sjimharris
12230557SjimharrisGCC is distributed in the hope that it will be useful, but WITHOUT ANY
13230557SjimharrisWARRANTY; without even the implied warranty of MERCHANTABILITY or
14230557SjimharrisFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15230557Sjimharrisfor more details.
16230557Sjimharris
17230557SjimharrisYou should have received a copy of the GNU General Public License
18230557Sjimharrisalong with GCC; see the file COPYING.  If not, write to the Free
19230557SjimharrisSoftware Foundation, 59 Temple Place - Suite 330, Boston, MA
20230557Sjimharris02111-1307, USA.  */
21230557Sjimharris
22230557Sjimharris
23230557Sjimharris#include "config.h"
24230557Sjimharris#include "system.h"
25230557Sjimharris#include "toplev.h"
26230557Sjimharris#include "rtl.h"
27230557Sjimharris#include "tree.h"
28230557Sjimharris#include "tm_p.h"
29230557Sjimharris#include "flags.h"
30230557Sjimharris#include "function.h"
31230557Sjimharris#include "expr.h"
32230557Sjimharris#include "optabs.h"
33230557Sjimharris#include "hard-reg-set.h"
34230557Sjimharris#include "insn-config.h"
35230557Sjimharris#include "ggc.h"
36230557Sjimharris#include "recog.h"
37230557Sjimharris#include "langhooks.h"
38230557Sjimharris
39230557Sjimharrisstatic rtx break_out_memory_refs	PARAMS ((rtx));
40230557Sjimharrisstatic void emit_stack_probe		PARAMS ((rtx));
41230557Sjimharris
42230557Sjimharris
/* Truncate and perhaps sign-extend C as appropriate for MODE.
   The result is the value C would have after being stored in an
   object of MODE and read back into a HOST_WIDE_INT.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  if (! SCALAR_INT_MODE_P (mode))
    abort ();

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      /* Mask C down to the low WIDTH bits, then XOR and subtract the
	 sign bit: this propagates bit WIDTH-1 through the upper bits,
	 i.e. sign-extends the WIDTH-bit value to a HOST_WIDE_INT.  */
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
73230557Sjimharris
/* Return an rtx for the sum of X and the integer C.

   X may be a constant, a constant-pool reference, a CONST wrapper,
   a SYMBOL_REF/LABEL_REF, or a sum; C is folded into any constant
   part when possible, otherwise an explicit PLUS is built.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     rtx x;
     HOST_WIDE_INT c;
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  /* Nonzero if X (after stripping CONST) was entirely constant, so the
     result must be re-wrapped in a CONST.  */
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	/* Sign-extend C to a double-word value before adding.  */
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  /* Keep the combined constant representable in X's mode.  */
	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      else if (find_constant_term_loc (&y))
	{
	  /* We need to be careful since X may be shared and we can't
	     modify it in place.  */
	  rtx copy = copy_rtx (x);
	  rtx *const_loc = find_constant_term_loc (&copy);

	  *const_loc = plus_constant (*const_loc, c);
	  x = copy;
	  c = 0;
	}
      break;

    default:
      break;
    }

  /* If C could not be folded away above, build an explicit PLUS.  */
  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
200230557Sjimharris
201230557Sjimharris/* If X is a sum, return a new sum like X but lacking any constant terms.
202230557Sjimharris   Add all the removed constant terms into *CONSTPTR.
203230557Sjimharris   X itself is not altered.  The result != X if and only if
204230557Sjimharris   it is not isomorphic to X.  */
205230557Sjimharris
206230557Sjimharrisrtx
207230557Sjimharriseliminate_constant_term (x, constptr)
208230557Sjimharris     rtx x;
209230557Sjimharris     rtx *constptr;
210230557Sjimharris{
211230557Sjimharris  rtx x0, x1;
212230557Sjimharris  rtx tem;
213230557Sjimharris
214230557Sjimharris  if (GET_CODE (x) != PLUS)
215230557Sjimharris    return x;
216230557Sjimharris
217230557Sjimharris  /* First handle constants appearing at this level explicitly.  */
218230557Sjimharris  if (GET_CODE (XEXP (x, 1)) == CONST_INT
219230557Sjimharris      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
220230557Sjimharris						XEXP (x, 1)))
221230557Sjimharris      && GET_CODE (tem) == CONST_INT)
222230557Sjimharris    {
223230557Sjimharris      *constptr = tem;
224230557Sjimharris      return eliminate_constant_term (XEXP (x, 0), constptr);
225230557Sjimharris    }
226230557Sjimharris
227230557Sjimharris  tem = const0_rtx;
228230557Sjimharris  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
229230557Sjimharris  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
230230557Sjimharris  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
231230557Sjimharris      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
232230557Sjimharris						*constptr, tem))
233230557Sjimharris      && GET_CODE (tem) == CONST_INT)
234230557Sjimharris    {
235230557Sjimharris      *constptr = tem;
236230557Sjimharris      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
237230557Sjimharris    }
238230557Sjimharris
239230557Sjimharris  return x;
240230557Sjimharris}
241230557Sjimharris
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      /* Notes carry no executable code; skip them.  */
      if (GET_CODE (insn) == NOTE)
	continue;
      /* A label or barrier ends the straight-line region we can scan.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  /* REG is overwritten here without being referenced first.  */
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      /* Follow an unconditional jump; anything else ends the scan.
		 NOTE(review): JUMP_LABEL is a CODE_LABEL, which the label
		 check above rejects on the next iteration -- confirm
		 whether following the jump can ever make progress.  */
	      if (any_uncondjump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  /* A call clobbers every call-used hard register.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}
286230557Sjimharris
287230557Sjimharris/* Return an rtx for the size in bytes of the value of EXP.  */
288230557Sjimharris
289230557Sjimharrisrtx
290230557Sjimharrisexpr_size (exp)
291230557Sjimharris     tree exp;
292230557Sjimharris{
293230557Sjimharris  tree size = (*lang_hooks.expr_size) (exp);
294230557Sjimharris
295230557Sjimharris  if (TREE_CODE (size) != INTEGER_CST
296230557Sjimharris      && contains_placeholder_p (size))
297230557Sjimharris    size = build (WITH_RECORD_EXPR, sizetype, size, exp);
298230557Sjimharris
299230557Sjimharris  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
300230557Sjimharris}
301230557Sjimharris
302230557Sjimharris/* Return a wide integer for the size in bytes of the value of EXP, or -1
303230557Sjimharris   if the size can vary or is larger than an integer.  */
304230557Sjimharris
305230557SjimharrisHOST_WIDE_INT
306230557Sjimharrisint_expr_size (exp)
307230557Sjimharris     tree exp;
308230557Sjimharris{
309230557Sjimharris  tree t = (*lang_hooks.expr_size) (exp);
310230557Sjimharris
311230557Sjimharris  if (t == 0
312230557Sjimharris      || TREE_CODE (t) != INTEGER_CST
313230557Sjimharris      || TREE_OVERFLOW (t)
314230557Sjimharris      || TREE_INT_CST_HIGH (t) != 0
315230557Sjimharris      /* If the result would appear negative, it's too big to represent.  */
316230557Sjimharris      || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
317230557Sjimharris    return -1;
318230557Sjimharris
319230557Sjimharris  return TREE_INT_CST_LOW (t);
320230557Sjimharris}
321230557Sjimharris
322230557Sjimharris/* Return a copy of X in which all memory references
323230557Sjimharris   and all constants that involve symbol refs
324   have been replaced with new temporary registers.
325   Also emit code to load the memory locations and constants
326   into those registers.
327
328   If X contains no such constants or memory references,
329   X itself (not a copy) is returned.
330
331   If a constant is found in the address that is not a legitimate constant
332   in an insn, it is left alone in the hope that it might be valid in the
333   address.
334
335   X may contain no arithmetic except addition, subtraction and multiplication.
336   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
337
338static rtx
339break_out_memory_refs (x)
340     rtx x;
341{
342  if (GET_CODE (x) == MEM
343      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
344	  && GET_MODE (x) != VOIDmode))
345    x = force_reg (GET_MODE (x), x);
346  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
347	   || GET_CODE (x) == MULT)
348    {
349      rtx op0 = break_out_memory_refs (XEXP (x, 0));
350      rtx op1 = break_out_memory_refs (XEXP (x, 1));
351
352      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
353	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
354    }
355
356  return x;
357}
358
359#ifdef POINTERS_EXTEND_UNSIGNED
360
/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  /* The source mode is whichever of ptr_mode/Pmode TO_MODE is not.  */
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;
  enum rtx_code code;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Fold the conversion of a constant at compile time.  A negative
	 POINTERS_EXTEND_UNSIGNED means no simple extension applies.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      /* A promoted-variable SUBREG (or one of a pointer register) whose
	 inner reg already has TO_MODE can simply be unwrapped.  */
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      /* Rebuild the LABEL_REF in the new mode, preserving its flag.  */
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      /* Copy the SYMBOL_REF node and just change its mode.  */
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      /* Push the conversion inside the CONST wrapper.  */
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it.  We can always safely permute them if we are
	 making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  /* Fall back to an explicit extension or truncation.  */
  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
440#endif
441
442/* Given a memory address or facsimile X, construct a new address,
443   currently equivalent, that is stable: future stores won't change it.
444
445   X must be composed of constants, register and memory references
446   combined with addition, subtraction and multiplication:
447   in other words, just what you can get from expand_expr if sum_ok is 1.
448
449   Works by making copies of all regs and memory locations used
450   by X and combining them the same way X does.
451   You could also stabilize the reference to this address
452   by copying the address to a register with copy_to_reg;
453   but then you wouldn't get indexed addressing in the reference.  */
454
455rtx
456copy_all_regs (x)
457     rtx x;
458{
459  if (GET_CODE (x) == REG)
460    {
461      if (REGNO (x) != FRAME_POINTER_REGNUM
462#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
463	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
464#endif
465	  )
466	x = copy_to_reg (x);
467    }
468  else if (GET_CODE (x) == MEM)
469    x = copy_to_reg (x);
470  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
471	   || GET_CODE (x) == MULT)
472    {
473      rtx op0 = copy_all_regs (XEXP (x, 0));
474      rtx op1 = copy_all_regs (XEXP (x, 1));
475      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
476	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
477    }
478  return x;
479}
480
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx oldx = x;

  /* An ADDRESSOF is always acceptable as-is.  */
  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
      && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.
	 (This target macro jumps to the `win' label below on success.)  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  (This target macro
	 jumps to `win' if it legitimizes the address.)  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      /* The address is valid; honor -fforce-addr by copying it through a
	 register anyway, so it can be cse'd.  */
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
612
613/* Like `memory_address' but pretend `flag_force_addr' is 0.  */
614
615rtx
616memory_address_noforce (mode, x)
617     enum machine_mode mode;
618     rtx x;
619{
620  int ambient_force_addr = flag_force_addr;
621  rtx val;
622
623  flag_force_addr = 0;
624  val = memory_address (mode, x);
625  flag_force_addr = ambient_force_addr;
626  return val;
627}
628
629/* Convert a mem ref into one with a valid memory address.
630   Pass through anything else unchanged.  */
631
632rtx
633validize_mem (ref)
634     rtx ref;
635{
636  if (GET_CODE (ref) != MEM)
637    return ref;
638  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
639      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
640    return ref;
641
642  /* Don't alter REF itself, since that is probably a stack slot.  */
643  return replace_equiv_address (ref, XEXP (ref, 0));
644}
645
646/* Given REF, either a MEM or a REG, and T, either the type of X or
647   the expression corresponding to REF, set RTX_UNCHANGING_P if
648   appropriate.  */
649
650void
651maybe_set_unchanging (ref, t)
652     rtx ref;
653     tree t;
654{
655  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
656     initialization is only executed once, or whose initializer always
657     has the same value.  Currently we simplify this to PARM_DECLs in the
658     first case, and decls with TREE_CONSTANT initializers in the second.
659
660     We cannot do this for non-static aggregates, because of the double
661     writes that can be generated by store_constructor, depending on the
662     contents of the initializer.  Yes, this does eliminate a good fraction
663     of the number of uses of RTX_UNCHANGING_P for a language like Ada.
664     It also eliminates a good quantity of bugs.  Let this be incentive to
665     eliminate RTX_UNCHANGING_P entirely in favour of a more reliable
666     solution, perhaps based on alias sets.  */
667
668  if ((TREE_READONLY (t) && DECL_P (t)
669       && (TREE_STATIC (t) || ! AGGREGATE_TYPE_P (TREE_TYPE (t)))
670       && (TREE_CODE (t) == PARM_DECL
671	   || DECL_INITIAL (t) == NULL_TREE
672	   || TREE_CONSTANT (DECL_INITIAL (t))))
673      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
674    RTX_UNCHANGING_P (ref) = 1;
675}
676
677/* Return a modified copy of X with its memory address copied
678   into a temporary register to protect it from side effects.
679   If X is not a MEM, it is returned unchanged (and not copied).
680   Perhaps even if it is a MEM, if there is no need to change it.  */
681
682rtx
683stabilize (x)
684     rtx x;
685{
686
687  if (GET_CODE (x) != MEM
688      || ! rtx_unstable_p (XEXP (x, 0)))
689    return x;
690
691  return
692    replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
693}
694
695/* Copy the value or contents of X to a new temp reg and return that reg.  */
696
697rtx
698copy_to_reg (x)
699     rtx x;
700{
701  rtx temp = gen_reg_rtx (GET_MODE (x));
702
703  /* If not an operand, must be an address with PLUS and MULT so
704     do the computation.  */
705  if (! general_operand (x, VOIDmode))
706    x = force_operand (x, temp);
707
708  if (x != temp)
709    emit_move_insn (temp, x);
710
711  return temp;
712}
713
714/* Like copy_to_reg but always give the new register mode Pmode
715   in case X is a constant.  */
716
717rtx
718copy_addr_to_reg (x)
719     rtx x;
720{
721  return copy_to_mode_reg (Pmode, x);
722}
723
724/* Like copy_to_reg but always give the new register mode MODE
725   in case X is a constant.  */
726
727rtx
728copy_to_mode_reg (mode, x)
729     enum machine_mode mode;
730     rtx x;
731{
732  rtx temp = gen_reg_rtx (mode);
733
734  /* If not an operand, must be an address with PLUS and MULT so
735     do the computation.  */
736  if (! general_operand (x, VOIDmode))
737    x = force_operand (x, temp);
738
739  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
740    abort ();
741  if (x != temp)
742    emit_move_insn (temp, x);
743  return temp;
744}
745
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      /* X contains address arithmetic; expand it into insns.  */
      temp = force_operand (x, NULL_RTX);
      if (GET_CODE (temp) == REG)
	insn = get_last_insn ();
      else
	{
	  /* force_operand produced something other than a REG;
	     copy the value into a fresh register.  */
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    set_unique_reg_note (insn, REG_EQUAL, x);

  return temp;
}
792
793/* If X is a memory ref, copy its contents to a new temp reg and return
794   that reg.  Otherwise, return X.  */
795
796rtx
797force_not_mem (x)
798     rtx x;
799{
800  rtx temp;
801
802  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
803    return x;
804
805  temp = gen_reg_rtx (GET_MODE (x));
806  emit_move_insn (temp, x);
807  return temp;
808}
809
810/* Copy X to TARGET (if it's nonzero and a reg)
811   or to a new temp reg and return that reg.
812   MODE is the mode to use for X in case it is a constant.  */
813
814rtx
815copy_to_suggested_reg (x, target, mode)
816     rtx x, target;
817     enum machine_mode mode;
818{
819  rtx temp;
820
821  if (target && GET_CODE (target) == REG)
822    temp = target;
823  else
824    temp = gen_reg_rtx (mode);
825
826  emit_move_insn (temp, x);
827  return temp;
828}
829
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  /* This target only wants promotion applied to call arguments.  */
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    /* Let the target widen small scalar types; the PROMOTE_MODE macro
       may update both MODE and UNSIGNEDP.  */
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    /* Pointers are kept in Pmode, extended per the target's rule.  */
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
875
876/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
877   This pops when ADJUST is positive.  ADJUST need not be constant.  */
878
879void
880adjust_stack (adjust)
881     rtx adjust;
882{
883  rtx temp;
884  adjust = protect_from_queue (adjust, 0);
885
886  if (adjust == const0_rtx)
887    return;
888
889  /* We expect all variable sized adjustments to be multiple of
890     PREFERRED_STACK_BOUNDARY.  */
891  if (GET_CODE (adjust) == CONST_INT)
892    stack_pointer_delta -= INTVAL (adjust);
893
894  temp = expand_binop (Pmode,
895#ifdef STACK_GROWS_DOWNWARD
896		       add_optab,
897#else
898		       sub_optab,
899#endif
900		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
901		       OPTAB_LIB_WIDEN);
902
903  if (temp != stack_pointer_rtx)
904    emit_move_insn (stack_pointer_rtx, temp);
905}
906
907/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
908   This pushes when ADJUST is positive.  ADJUST need not be constant.  */
909
910void
911anti_adjust_stack (adjust)
912     rtx adjust;
913{
914  rtx temp;
915  adjust = protect_from_queue (adjust, 0);
916
917  if (adjust == const0_rtx)
918    return;
919
920  /* We expect all variable sized adjustments to be multiple of
921     PREFERRED_STACK_BOUNDARY.  */
922  if (GET_CODE (adjust) == CONST_INT)
923    stack_pointer_delta += INTVAL (adjust);
924
925  temp = expand_binop (Pmode,
926#ifdef STACK_GROWS_DOWNWARD
927		       sub_optab,
928#else
929		       add_optab,
930#endif
931		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
932		       OPTAB_LIB_WIDEN);
933
934  if (temp != stack_pointer_rtx)
935    emit_move_insn (stack_pointer_rtx, temp);
936}
937
938/* Round the size of a block to be pushed up to the boundary required
939   by this machine.  SIZE is the desired size, which need not be constant.  */
940
941rtx
942round_push (size)
943     rtx size;
944{
945  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
946  if (align == 1)
947    return size;
948  if (GET_CODE (size) == CONST_INT)
949    {
950      int new = (INTVAL (size) + align - 1) / align * align;
951      if (INTVAL (size) != new)
952	size = GEN_INT (new);
953    }
954  else
955    {
956      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
957	 but we know it can't.  So add ourselves and then do
958	 TRUNC_DIV_EXPR.  */
959      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
960			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
961      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
962			    NULL_RTX, 1);
963      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
964    }
965  return size;
966}
967
968/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
969   to a previously-created save area.  If no save area has been allocated,
970   this function will allocate one.  If a save area is specified, it
971   must be of the proper mode.
972
973   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
974   are emitted at the current position.  */
975
void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* A nonlocal save must live in the stack frame so that a
	     nonlocal goto can find it; other saves can use a pseudo.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      /* Build the save insns in a sequence so the whole group can be
	 placed after AFTER.  */
      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      /* Emit at the current position.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
1052
1053/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
1054   area made by emit_stack_save.  If it is zero, we have nothing to do.
1055
1056   Put any emitted insns after insn AFTER, if nonzero, otherwise at
1057   current position.  */
1058
void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* NOTE: the parameter declarations above are not in call order; K&R
     definitions match parameters by name, so this is harmless.  */

  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode,
			gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  if (after)
    {
      rtx seq;

      /* Build the restore in a sequence so the whole group can be
	 placed after AFTER.  */
      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
1119
1120#ifdef SETJMP_VIA_SAVE_AREA
1121/* Optimize RTL generated by allocate_dynamic_stack_space for targets
1122   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
1123   platforms, the dynamic stack space used can corrupt the original
1124   frame, thus causing a crash if a longjmp unwinds to it.  */
1125
void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  /* Scan every insn for a REG_SAVE_AREA note left by
     allocate_dynamic_stack_space.  */
  for (insn = insns; insn; insn = NEXT_INSN(insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now only supported port with stack that grow upward
		 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* No setjmp: shrink the allocation back to the size saved
		 in the note.  This will now be transformed into a
		 (set REG REG) so we can just blow away all the other
		 notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  /* Note is at the head of the list; unlink directly.  */
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  /* Find the note's predecessor and splice it out.  */
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort ();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
1198#endif /* SETJMP_VIA_SAVE_AREA */
1199
1200/* Return an rtx representing the address of an area of memory dynamically
1201   pushed on the stack.  This region of memory is always aligned to
1202   a multiple of BIGGEST_ALIGNMENT.
1203
1204   Any required stack pointer alignment is preserved.
1205
1206   SIZE is an rtx representing the size of the area.
1207   TARGET is a place in which the address can be placed.
1208
1209   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
1210
rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  /* If nonzero, the un-padded allocation size, used by
     optimize_save_area_alloca when setjmp is never called.  */
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  /* Pad SIZE so the later alignment of TARGET cannot run past the
     allocated region.  */
  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

	/* ??? Code below assumes that the save area needs maximal
	   alignment.  This constraint may be too strong.  */
	if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	  abort ();

	if (GET_CODE (size) == CONST_INT)
	  {
	    /* Round DOWN to a multiple of ALIGN; the padding added
	       above is not needed if setjmp never runs.  */
	    HOST_WIDE_INT new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    /* Enlarge the allocation to keep it clear of the setjmp save area.  */
    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack if presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

 /* We ought to be called always on the toplevel and stack ought to be aligned
    properly.  */
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      /* On an upward-growing stack the new block starts at the current
	 dynamic area, so record that address before adjusting.  */
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
	  /* Compute the remaining room between the stack pointer and
	     the stack limit, as an unsigned quantity.  */
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
	  /* Not enough room: trap if the target can, else it's a
	     compile-time error.  */
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      /* Tag the stack adjustment so optimize_save_area_alloca can later
	 shrink it to SETJMPLESS_SIZE when setjmp is never called.  */
      if (setjmpless_size != NULL_RTX)
	{
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */

      /* On a downward-growing stack the new block starts at the dynamic
	 area *after* the adjustment.  */
#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
1459
1460/* A front end may want to override GCC's stack checking by providing a
1461   run-time routine to call to check the stack, so provide a mechanism for
1462   calling that routine.  */
1463
1464static GTY(()) rtx stack_check_libfunc;
1465
1466void
1467set_stack_check_libfunc (libfunc)
1468     rtx libfunc;
1469{
1470  stack_check_libfunc = libfunc;
1471}
1472
1473/* Emit one stack probe at ADDRESS, an address within the stack.  */
1474
1475static void
1476emit_stack_probe (address)
1477     rtx address;
1478{
1479  rtx memref = gen_rtx_MEM (word_mode, address);
1480
1481  MEM_VOLATILE_P (memref) = 1;
1482
1483  if (STACK_CHECK_PROBE_LOAD)
1484    emit_move_insn (gen_reg_rtx (word_mode), memref);
1485  else
1486    emit_move_insn (memref, const0_rtx);
1487}
1488
1489/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1490   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
1491   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
1492   subtract from the stack.  If SIZE is constant, this is done
1493   with a fixed number of probes.  Otherwise, we must make a loop.  */
1494
1495#ifdef STACK_GROWS_DOWNWARD
1496#define STACK_GROW_OP MINUS
1497#else
1498#define STACK_GROW_OP PLUS
1499#endif
1500
void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      /* Pass the address one past the region to be checked.  */
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					         stack_pointer_rtx,
					         plus_constant (size, first)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != ptr_mode)
	addr = convert_memory_address (ptr_mode, addr);
#endif

      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      /* Validate the operand against the insn's predicate, copying it
	 to a register if necessary.  */
      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      /* Final probe at the end of the region.  */
      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      /* First probe address: one interval beyond FIRST.  */
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      /* End of the region to be probed.  */
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      /* The loop below updates TEST_ADDR in place, so it must be a
	 pseudo register.  */
      if (GET_CODE (test_addr) != REG
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      /* Loop body: probe, then advance TEST_ADDR by one interval in the
	 direction of stack growth.  */
      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      /* The update must have gone directly into TEST_ADDR.  */
      if (temp != test_addr)
	abort ();

      /* Loop while TEST_ADDR has not yet passed LAST_ADDR.  */
      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      /* Final probe at the end of the region.  */
      emit_stack_probe (last_addr);
    }
}
1625
1626/* Return an rtx representing the register or memory location
1627   in which a scalar value of data type VALTYPE
1628   was returned by a function call to function FUNC.
1629   FUNC is a FUNCTION_DECL node if the precise function is known,
1630   otherwise 0.
1631   OUTGOING is 1 if on a machine with register windows this function
1632   should return the register in which the function will put its result
1633   and 0 otherwise.  */
1634
1635rtx
1636hard_function_value (valtype, func, outgoing)
1637     tree valtype;
1638     tree func ATTRIBUTE_UNUSED;
1639     int outgoing ATTRIBUTE_UNUSED;
1640{
1641  rtx val;
1642
1643#ifdef FUNCTION_OUTGOING_VALUE
1644  if (outgoing)
1645    val = FUNCTION_OUTGOING_VALUE (valtype, func);
1646  else
1647#endif
1648    val = FUNCTION_VALUE (valtype, func);
1649
1650  if (GET_CODE (val) == REG
1651      && GET_MODE (val) == BLKmode)
1652    {
1653      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1654      enum machine_mode tmpmode;
1655
1656      /* int_size_in_bytes can return -1.  We don't need a check here
1657	 since the value of bytes will be large enough that no mode
1658	 will match and we will abort later in this function.  */
1659
1660      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1661	   tmpmode != VOIDmode;
1662	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1663	{
1664	  /* Have we found a large enough mode?  */
1665	  if (GET_MODE_SIZE (tmpmode) >= bytes)
1666	    break;
1667	}
1668
1669      /* No suitable mode found.  */
1670      if (tmpmode == VOIDmode)
1671	abort ();
1672
1673      PUT_MODE (val, tmpmode);
1674    }
1675  return val;
1676}
1677
1678/* Return an rtx representing the register or memory location
1679   in which a scalar value of mode MODE was returned by a library call.  */
1680
1681rtx
1682hard_libcall_value (mode)
1683     enum machine_mode mode;
1684{
1685  return LIBCALL_VALUE (mode);
1686}
1687
1688/* Look up the tree code for a given rtx code
1689   to provide the arithmetic operation for REAL_ARITHMETIC.
1690   The function returns an int because the caller may not know
1691   what `enum tree_code' means.  */
1692
1693int
1694rtx_to_tree_code (code)
1695     enum rtx_code code;
1696{
1697  enum tree_code tcode;
1698
1699  switch (code)
1700    {
1701    case PLUS:
1702      tcode = PLUS_EXPR;
1703      break;
1704    case MINUS:
1705      tcode = MINUS_EXPR;
1706      break;
1707    case MULT:
1708      tcode = MULT_EXPR;
1709      break;
1710    case DIV:
1711      tcode = RDIV_EXPR;
1712      break;
1713    case SMIN:
1714      tcode = MIN_EXPR;
1715      break;
1716    case SMAX:
1717      tcode = MAX_EXPR;
1718      break;
1719    default:
1720      tcode = LAST_AND_UNUSED_TREE_CODE;
1721      break;
1722    }
1723  return ((int) tcode);
1724}
1725
1726#include "gt-explow.h"
1727