/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"

/* Verify that there is exactly one jump instruction emitted since LAST,
   and attach to it a REG_BR_PROB note specifying PROBABILITY.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into multiple
   conditionals.  This is, however, difficult to do.  */
static void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit condjump-around-jump sequence when we don't know
           how to reverse the conditional.  Special case this.  */
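        /* The insn sequence being matched below is:

               conditional jump to L    <- LAST
               unconditional jump
               barrier
               L:                       <- end of sequence

           in which case the note belongs on the conditional jump with
           the inverted probability, REG_BR_PROB_BASE - PROBABILITY.  */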
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        REG_NOTES (last)
          = gen_rtx_EXPR_LIST (REG_BR_PROB,
                               GEN_INT (REG_BR_PROB_BASE - probability),
                               REG_NOTES (last));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
                         GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}


#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
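      /* The frame grows toward lower addresses: move down by SIZE
         bytes (adjusting for the frame phase), then round down to a
         multiple of ALIGN, which must be a power of two for the mask
         below to behave.  */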
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
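  /* The coordinate is unordered: (1, 3) and (3, 1) both map to
     3 * 4 / 2 + 1 == 7, so callers need only the lower triangle
     of the conflict matrix.  */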
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
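  /* The lower triangle of an N x N matrix, diagonal included, holds
     N * (N + 1) / 2 entries; triangular_index (N-1, N-1) is the last
     of them.  */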
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures that contain unions.  Otherwise RTL alias
   analysis comes along and, due to type-based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; }
   accesses to the same mem through different types may not alias, and
   happily reorders stores across the life-time boundaries of the
   temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

401169689Skan
402169689Skan/* A subroutine of expand_used_vars.  Binpack the variables into
403169689Skan   partitions constrained by the interference graph.  The overall
404169689Skan   algorithm used is as follows:
405169689Skan
406169689Skan	Sort the objects by size.
407169689Skan	For each object A {
408169689Skan	  S = size(A)
409169689Skan	  O = 0
410169689Skan	  loop {
411169689Skan	    Look for the largest non-conflicting object B with size <= S.
412169689Skan	    UNION (A, B)
413169689Skan	    offset(B) = O
414169689Skan	    O += size(B)
415169689Skan	    S -= size(B)
416169689Skan	  }
417169689Skan	}
418169689Skan*/
419169689Skan
420169689Skanstatic void
421169689Skanpartition_stack_vars (void)
422169689Skan{
423169689Skan  size_t si, sj, n = stack_vars_num;
424169689Skan
425169689Skan  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
426169689Skan  for (si = 0; si < n; ++si)
427169689Skan    stack_vars_sorted[si] = si;
428169689Skan
429169689Skan  if (n == 1)
430169689Skan    return;
431169689Skan
432169689Skan  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
433169689Skan
434169689Skan  /* Special case: detect when all variables conflict, and thus we can't
435169689Skan     do anything during the partitioning loop.  It isn't uncommon (with
436169689Skan     C code at least) to declare all variables at the top of the function,
437169689Skan     and if we're not inlining, then all variables will be in the same scope.
438169689Skan     Take advantage of very fast libc routines for this scan.  */
439169689Skan  gcc_assert (sizeof(bool) == sizeof(char));
440169689Skan  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
441169689Skan    return;
442169689Skan
443169689Skan  for (si = 0; si < n; ++si)
444169689Skan    {
445169689Skan      size_t i = stack_vars_sorted[si];
446169689Skan      HOST_WIDE_INT isize = stack_vars[i].size;
447169689Skan      HOST_WIDE_INT offset = 0;
448169689Skan
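      /* STACK_VARS_SORTED is ordered by non-decreasing size, so walking
         SJ downward from SI visits the candidates no larger than I in
         decreasing size order; this implements the "largest
         non-conflicting object" step of the algorithm above.  */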
      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
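              /* Round OFFSET up to the next multiple of JALIGN (a
                 power of two), and charge the padding against the
                 space remaining in the partition.  */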
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}


/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Record the alignment we actually gave this decl.  */
  offset -= frame_phase;
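  /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
     largest power of two dividing it, which is all the alignment the
     slot is actually guaranteed to have.  */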
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 stack_vars[j].offset + offset);
    }
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode all static variables are expanded at the end
     of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
         be pointers.  Do not trust compiler-generated temporaries;
         our type system is totally busted as it relates to pointer
         arithmetic, which translates into lots of compiler-generated
         objects with pointer types that are not really pointers.  */
      if (POINTER_TYPE_P (type))
        mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
        /* Force local static variables to be output when marked by
           used attribute.  For unit-at-a-time, cgraph code already takes
           care of this.  */
        || (!flag_unit_at_a_time && TREE_STATIC (t)
            && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

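      /* Variables at this level conflict with each other and with
         everything declared in the sublevels; variables in sibling
         sublevels do not conflict with each other, which is what the
         upper bound on J (I+1 versus THIS_SV_NUM) encodes.  */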
      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
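    /* For example, a 16 byte preferred boundary with a
       STARTING_FRAME_OFFSET of 8 gives OFF == 8 and FRAME_PHASE == 8:
       frame offsets congruent to 8 modulo 16 land on the boundary.  */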
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
         will have been propagated anywhere the optimizers chose,
         i.e. not confined to its original block.  Allocate such
         variables as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (current_function_calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
        emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
        emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }
  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
              && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of them.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance the sqrt builtin expander expands an if with a
         sibcall in the then arm and a label for the else arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n;; Generating RTL for tree basic block %d\n",
               bb->index);
    }

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges have a counterpart in the
	 RTL representation.  It is safe to remove them here, as
	 find_many_sub_basic_blocks will rediscover them.  In the future
	 this should be fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fix up the CFG accordingly.  */
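      /* (Dispatch overview, illustrative: a GIMPLE `if (x) goto L1;
	 else goto L2;' goes through expand_gimple_cond_expr, a
	 `return foo ();' whose CALL_EXPR is marked CALL_EXPR_TAILCALL
	 goes through expand_gimple_tailcall, and everything else goes
	 through the generic expand_expr_stmt path.)  */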
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
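  /* For a switch statement, for instance, the stream typically ends as
     (illustrative)

	jump_insn (casesi ...)	<-- the real block end
	code_label		(table label)
	jump table data
	barrier

     so we step back over the barrier, then over the table and its
     label.  */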
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
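  /* The resulting shape is (illustrative):

	ENTRY --> init_block --(flags)--> first_block

     The original edge out of ENTRY is redirected to init_block below so
     that the initialization code dominates the function body.  */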
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}


/* Create the exit block: the block holding the function return sequence,
   landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
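  /* The fallthru edge starts out carrying the whole exit count; whatever
     reaches EXIT through the remaining (abnormal) edges is subtracted
     below to keep the profile consistent.  */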
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark the
   base arrays addressable.  */
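/* For example (illustrative C input):

	int a[4];
	...
	x = a[i];	<-- `i' is not constant, so `a' cannot live in
			    a register; its decl gets TREE_ADDRESSABLE.
	y = a[1];	<-- constant index; nothing is marked.  */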

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == NOP_EXPR
	     || TREE_CODE (t) == CONVERT_EXPR)
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
		   NULL, NULL);
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do the conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */
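/* Rough order of operations, summarizing the body below:

	discover_nonconstant_array_refs ();  mark variably-indexed arrays
	expand_used_vars ();		     give variables RTL homes
	expand_function_start (...);	     set up parameters and return
	construct_init_block ();
	expand each GIMPLE basic block to RTL;
	construct_exit_block ();
	find_many_sub_basic_blocks (...);    rebuild the RTL-level CFG  */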

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared on all
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  return 0;
}

struct tree_opt_pass pass_expand =
{
  "expand",				/* name */
  NULL,					/* gate */
  tree_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  'r'					/* letter */
};