explow.c revision 169690
/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
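
/* Illustrative sketch (editor's addition, not part of the original file):
   a hypothetical use of trunc_int_for_mode.  Truncating 0x1ff to QImode
   keeps only the low 8 bits (0xff) and then sign-extends them, so the
   result is -1 on any host:

     HOST_WIDE_INT v = trunc_int_for_mode (0x1ff, QImode);
     gcc_assert (v == -1);
*/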

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	unsigned HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	unsigned HOST_WIDE_INT lv;
	HOST_WIDE_INT hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      else if (find_constant_term_loc (&y))
	{
	  /* We need to be careful since X may be shared and we can't
	     modify it in place.  */
	  rtx copy = copy_rtx (x);
	  rtx *const_loc = find_constant_term_loc (&copy);

	  *const_loc = plus_constant (*const_loc, c);
	  x = copy;
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
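
/* Illustrative sketch (editor's addition, not part of the original file):
   a hypothetical caller folding an extra byte offset into a symbolic
   constant address.  Starting from (const (plus (symbol_ref "x") 8)),
   adding 4 more yields a single folded constant:

     rtx sym  = gen_rtx_SYMBOL_REF (Pmode, "x");
     rtx addr = gen_rtx_CONST (Pmode,
			       gen_rtx_PLUS (Pmode, sym, GEN_INT (8)));
     addr = plus_constant (addr, 4);
     // ADDR is now (const (plus (symbol_ref "x") (const_int 12))).
*/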

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
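
/* Illustrative sketch (editor's addition, not part of the original file):
   a hypothetical use that splits the constant displacement out of a sum.
   REG is assumed to be some Pmode pseudo created elsewhere:

     rtx constant_term = const0_rtx;
     rtx sum  = gen_rtx_PLUS (Pmode, reg, GEN_INT (12));
     rtx base = eliminate_constant_term (sum, &constant_term);
     // BASE is REG and CONSTANT_TERM is (const_int 12); SUM is unchanged.
*/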

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lang_hooks.expr_size (exp), exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = lang_hooks.expr_size (exp);

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
			rtx x)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it.  We can always safely permute them if we are
	 making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
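
/* Illustrative sketch (editor's addition, not part of the original file):
   on a target where ptr_mode and Pmode differ (e.g. 32-bit pointers in a
   64-bit address space), a hypothetical caller widens a ptr_mode address
   before forming a MEM.  ADDR is assumed to be some ptr_mode address rtx
   built elsewhere:

     rtx wide = convert_memory_address (Pmode, addr);
     rtx mem  = gen_rtx_MEM (SImode, wide);
*/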

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

  x = convert_memory_address (Pmode, x);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_p (mode, x))
	goto win;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && !REG_P (x))
	{
	  x = force_operand (x, NULL_RTX);
	  x = force_reg (Pmode, x);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
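
/* Illustrative sketch (editor's addition, not part of the original file):
   the usual pattern when expanding a load from BASE + INDEX, where BASE,
   INDEX and DEST are assumed to be pseudos created elsewhere.  Any code
   needed to legitimize the address is emitted here:

     rtx addr = memory_address (SImode,
				gen_rtx_PLUS (Pmode, base, index));
     rtx mem  = gen_rtx_MEM (SImode, addr);
     emit_move_insn (dest, mem);
*/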

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (enum machine_mode mode, rtx x)
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

	align = MIN (sa, ca);
      }
    else if (MEM_P (x) && MEM_POINTER (x))
      align = MEM_ALIGN (x);

    if (align)
      mark_reg_pointer (temp, align);
  }

  return temp;
}
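
/* Illustrative sketch (editor's addition, not part of the original file):
   forcing a symbolic address into a fresh pseudo.  "some_var" is a
   hypothetical symbol name:

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_var");
     rtx reg = force_reg (Pmode, sym);
     // REG is a new pseudo holding the address; force_reg also calls
     // mark_reg_pointer on it, using DECL_ALIGN when SYMBOL_REF_DECL
     // is available.  The caller must not modify REG afterwards.
*/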

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
#endif

enum machine_mode
promote_mode (tree type, enum machine_mode mode, int *punsignedp,
	      int for_call ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifndef PROMOTE_MODE
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_FUNCTION_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:
#ifdef PROMOTE_MODE
      if (for_call)
	{
#endif
	  PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
#ifdef PROMOTE_MODE
	}
      else
	{
	  PROMOTE_MODE (mode, unsignedp, type);
	}
#endif
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
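
/* Illustrative sketch (editor's addition, not part of the original file):
   the usual calling pattern when deciding how an argument is passed.
   TYPE is assumed to be some integral tree type obtained elsewhere:

     int unsignedp = TYPE_UNSIGNED (type);
     enum machine_mode mode
       = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
     // On targets defining PROMOTE_FUNCTION_MODE, a QImode or HImode
     // argument typically comes back as SImode, with UNSIGNEDP saying
     // which extension to perform.
*/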

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode,
			gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
		    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
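
/* Illustrative sketch (editor's addition, not part of the original file):
   the usual save/restore pairing around a region that adjusts the stack,
   for example when expanding a block containing a variable-sized object:

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX);
     // ... emit code that calls allocate_dynamic_stack_space ...
     emit_stack_restore (SAVE_BLOCK, save_area, NULL_RTX);
*/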

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
				      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
		       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!current_function_calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
	 alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (GET_CODE (size) == CONST_INT)
	{
	  HOST_WIDE_INT new = INTVAL (size) / align * align;

	  if (INTVAL (size) != new)
	    size = GEN_INT (new);
	}
      else
	{
	  /* Since we know overflow is not possible, we avoid using
	     CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
				GEN_INT (align), NULL_RTX, 1);
	  size = expand_mult (Pmode, size,
			      GEN_INT (align), NULL_RTX, 1);
	}
    }
  else
    {
      rtx dynamic_offset
	= expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
			stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

 /* We ought always to be called at the top level, and the stack ought to be
    aligned properly.  */
  gcc_assert (!(stack_pointer_delta
		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
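
/* Illustrative sketch (editor's addition, not part of the original file):
   how an expander might allocate a dynamic block, mirroring the way the
   alloca built-in is expanded.  SIZE_RTX is assumed to be a Pmode (or
   VOIDmode constant) rtx computed elsewhere:

     rtx addr = allocate_dynamic_stack_space (size_rtx, NULL_RTX,
					      BITS_PER_UNIT);
     rtx mem  = gen_rtx_MEM (BLKmode, addr);
     // ADDR is aligned to BIGGEST_ALIGNMENT; the space is released when
     // an enclosing emit_stack_save/emit_stack_restore pair, or the
     // function epilogue, resets the stack pointer.
*/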

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					         stack_pointer_rtx,
					         plus_constant (size, first)));

      addr = convert_memory_address (ptr_mode, addr);
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
	last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
	   && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
	 for values of N from 1 until it exceeds LAST.  If only one
	 probe is needed, this will not generate any code.  Then probe
	 at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
	   offset < INTVAL (size);
	   offset = offset + STACK_CHECK_PROBE_INTERVAL)
	emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					  stack_pointer_rtx,
					  GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					stack_pointer_rtx,
					plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
			 NULL_RTX);
      rtx last_addr
	= force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					 stack_pointer_rtx,
					 plus_constant (size, first)),
			 NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (!REG_P (test_addr)
	  || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
	test_addr = force_reg (Pmode, test_addr);

      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
			   1, OPTAB_WIDEN);
#endif

      gcc_assert (temp == test_addr);

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
			       NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (tree valtype, tree func, tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"