/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"

/* Verify that there is exactly one jump instruction emitted since LAST,
   and attach a REG_BR_PROB note to it specifying the branch probability.
   ??? We really ought to pass the probability down to RTL expanders and
   let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
static void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit a condjump-around-jump sequence when we don't
	   know how to reverse the conditional.  Special case this.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !NEXT_INSN (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	REG_NOTES (last)
	  = gen_rtx_EXPR_LIST (REG_BR_PROB,
			       GEN_INT (REG_BR_PROB_BASE - probability),
			       REG_NOTES (last));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
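
/* Illustration (not from the sources): had the expander emitted a
   condjump-around-jump sequence such as

	(cond_jump L1)		;; taken when the condition is false
	(jump L_then_target)
	(barrier)
     L1:

   then the probability P passed in describes reaching L_then_target,
   while the REG_BR_PROB note must describe the conditional jump itself,
   which is taken in the complementary case.  That is why the first case
   above records REG_BR_PROB_BASE - P rather than P.  */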


#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
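
/* A worked example of the rounding above (assumed values, illustration
   only): with FRAME_GROWS_DOWNWARD, frame_phase == 0 and
   frame_offset == -4, a request for 12 bytes at alignment 8 computes

	new_frame_offset = -4 - 12 = -16;
	new_frame_offset &= -8;		/-> -16, already a multiple of 8

   so the variable lands at offset -16 and frame_offset becomes -16.
   The "& -align" idiom works because -align is ...111000 in two's
   complement, i.e. a mask that clears the low log2(align) bits.  */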

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
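
/* For illustration (not part of the pass): the lower triangle is stored
   row-major, so pairs with i >= j map as

	(0,0) -> 0
	(1,0) -> 1   (1,1) -> 2
	(2,0) -> 3   (2,1) -> 4   (2,2) -> 5
	(3,1) -> 3*4/2 + 1 = 7

   and triangular_index (1, 3) swaps its arguments to yield the same 7.
   This is why the conflict bitmap below needs only n*(n+1)/2 entries
   rather than n*n.  */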

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for aggregates containing unions.  Otherwise RTL alias
   analysis comes along and, due to type based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; } accesses
   to the same mem through different types may not alias, and happily
   reorders stores across life-time boundaries of the temporaries
   (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
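
/* A small illustration of the loop below (hypothetical objects): given a
   16-byte object A and a non-conflicting 8-byte object B, the inner loop
   UNIONs B into A's partition at offset O and reduces the remaining
   space S from 16 to 8.  A further 4-byte candidate C is accepted only
   if it conflicts with neither A nor the already-merged B, since
   union_stack_vars folds B's conflicts into A's entry in the
   interference graph.  */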

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;
	}
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set the alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
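
/* Note on the bit trick above (illustrative): offset & -offset isolates
   the lowest set bit of OFFSET, e.g. 24 & -24 == 8, so a variable at
   byte offset 24 from the (aligned) frame base is known to be at best
   8-byte aligned.  An offset of 0 yields align == 0, which is treated
   as "fully aligned" by capping at STACK_BOUNDARY.  */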

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 stack_vars[j].offset + offset);
    }
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode all static variables are expanded at the end
     of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
	 be pointers.  Do not trust compiler generated temporaries
	 as our type system is totally busted as it relates to
	 pointer arithmetic which translates into lots of compiler
	 generated objects with pointer types, but which are not really
	 pointers.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
	/* Force local static variables to be output when marked by
	   used attribute.  For unit-at-a-time, cgraph code already takes
	   care of this.  */
	|| (!flag_unit_at_a_time && TREE_STATIC (t)
	    && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num; )
	  add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
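
/* Example classification (assuming the default --param ssp-buffer-size
   of 8): "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY and is
   placed in phase 1 under every protection level; "int v[4]" is only
   SPCT_HAS_ARRAY, so it is phase 2 under -fstack-protector-all or
   -fstack-protector-strong, and phase 0 (no segregation) under plain
   -fstack-protector.  */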

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if ((TREE_CODE (field_type) == RECORD_TYPE
	     || TREE_CODE (field_type) == UNION_TYPE
	     || TREE_CODE (field_type) == QUAL_UNION_TYPE)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
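
  /* For instance (assumed target values, illustration only): with a
     16-byte PREFERRED_STACK_BOUNDARY and STARTING_FRAME_OFFSET == 8,
     off == 8 and frame_phase == 8, satisfying the invariant that
     (frame_offset + frame_phase) stays a multiple of the boundary.  */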

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
      {
	tree var = TREE_VALUE (t);
	if (!is_global_var (var))
	  {
	    tree var_type = TREE_TYPE (var);
	    /* Examine local referenced variables that have their addresses
	       taken, contain an array, or are arrays.  */
	    if (TREE_CODE (var) == VAR_DECL
		&& (TREE_CODE (var_type) == ARRAY_TYPE
		    || TREE_ADDRESSABLE (var)
		    || ((TREE_CODE (var_type) == RECORD_TYPE
			 || TREE_CODE (var_type) == UNION_TYPE
			 || TREE_CODE (var_type) == QUAL_UNION_TYPE)
			&& record_or_union_type_has_array_p (var_type))))
	      {
		gen_stack_protect_signal = true;
		break;
	      }
	  }
      }

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || current_function_calls_alloca || has_protected_decls)
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (current_function_calls_alloca || has_protected_decls)
	create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == SPCT_FLAG_ALL)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
	emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
	emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }
  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
	      && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}
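
/* Illustration (a reading of the code above, not from the sources): for
   "if (p) goto A; else goto B;" with both arms explicit gotos, we emit
   the conditional jump to A, expand the else arm's jump to B into a
   fresh basic block, and redirect the false edge onto that block as a
   fallthru edge; the new block is returned so the caller can continue
   expansion from it.  */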
1250169689Skan
1251169689Skan/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
1252169689Skan   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
1253169689Skan   generated a tail call (something that might be denied by the ABI
1254169689Skan   rules governing the call; see calls.c).
1255169689Skan
1256169689Skan   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1257169689Skan   can still reach the rest of BB.  The case here is __builtin_sqrt,
1258169689Skan   where the NaN result goes through the external function (with a
1259169689Skan   tailcall) and the normal result happens via a sqrt instruction.  */
1260169689Skan
1261169689Skanstatic basic_block
1262169689Skanexpand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
1263169689Skan{
1264169689Skan  rtx last2, last;
1265169689Skan  edge e;
1266169689Skan  edge_iterator ei;
1267169689Skan  int probability;
1268169689Skan  gcov_type count;
1269169689Skan
1270169689Skan  last2 = last = get_last_insn ();
1271169689Skan
1272169689Skan  expand_expr_stmt (stmt);
1273169689Skan
1274169689Skan  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1275169689Skan    if (CALL_P (last) && SIBLING_CALL_P (last))
1276169689Skan      goto found;
1277169689Skan
1278169689Skan  maybe_dump_rtl_for_tree_stmt (stmt, last2);
1279169689Skan
1280169689Skan  *can_fallthru = true;
1281169689Skan  return NULL;
1282169689Skan
1283169689Skan found:
1284169689Skan  /* ??? Wouldn't it be better to just reset any pending stack adjust?
1285169689Skan     Any instructions emitted here are about to be deleted.  */
1286169689Skan  do_pending_stack_adjust ();
1287169689Skan
1288169689Skan  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
1289169689Skan  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
1290169689Skan     EH or abnormal edges, we shouldn't have created a tail call in
1291169689Skan     the first place.  So it seems to me we should just be removing
1292169689Skan     all edges here, or redirecting the existing fallthru edge to
1293169689Skan     the exit block.  */
1294169689Skan
1295169689Skan  probability = 0;
1296169689Skan  count = 0;
1297169689Skan
1298169689Skan  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1299169689Skan    {
1300169689Skan      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1301169689Skan	{
1302169689Skan	  if (e->dest != EXIT_BLOCK_PTR)
1303169689Skan	    {
1304169689Skan	      e->dest->count -= e->count;
1305169689Skan	      e->dest->frequency -= EDGE_FREQUENCY (e);
1306169689Skan	      if (e->dest->count < 0)
1307169689Skan		e->dest->count = 0;
1308169689Skan	      if (e->dest->frequency < 0)
1309169689Skan		e->dest->frequency = 0;
1310169689Skan	    }
1311169689Skan	  count += e->count;
1312169689Skan	  probability += e->probability;
1313169689Skan	  remove_edge (e);
1314169689Skan	}
1315169689Skan      else
1316169689Skan	ei_next (&ei);
1317169689Skan    }
1318169689Skan
1319169689Skan  /* This is somewhat ugly: the call_expr expander often emits instructions
1320169689Skan     after the sibcall (to perform the function return).  These confuse the
1321169689Skan     find_many_sub_basic_blocks code, so we need to get rid of these.  */
1322169689Skan  last = NEXT_INSN (last);
1323169689Skan  gcc_assert (BARRIER_P (last));
1324169689Skan
  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if with the
	 sibcall in the `then' arm and a label for the `else' arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n;; Generating RTL for tree basic block %d\n",
	       bb->index);
    }

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */
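
/* A sketch of the result (illustrative): the insns emitted before the
   first basic block become INIT_BLOCK, spliced between the entry block
   and the first real block:

     ENTRY --fallthru--> INIT_BLOCK --> first_block --> ...

   with an explicit jump emitted only when the original entry edge did
   not point at ENTRY_BLOCK_PTR->next_bb.  */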

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}


/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark the
   base array as addressable.  */
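
/* For instance (an illustrative example, not from the sources): given

     struct s { int a[4]; } y[8];
     ... y[i].a[1] ...

   the loop below peels the invariant-index ARRAY_REF (.a[1]) and the
   COMPONENT_REF, stopping at y[i]; since I is not gimple-invariant,
   get_base_address yields Y, which is marked TREE_ADDRESSABLE.  */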

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == NOP_EXPR
	     || TREE_CODE (t) == CONVERT_EXPR)
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */
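
/* For example (illustrative): with

     int a[2];

   promoted into a register, an access a[i] with non-constant I has no
   sensible expansion as a register operation; marking A addressable
   forces it into memory, where the variable index becomes an ordinary
   address offset.  */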

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
		   NULL, NULL);
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do the conversion per basic block and preserve/update the tree
   CFG.  This implies we have to do some magic, as the CFG can
   simultaneously consist of basic blocks containing RTL and basic
   blocks containing GIMPLE trees.  This can confuse the CFG hooks,
   so we must be careful not to manipulate the CFG during the
   expansion.  */
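
/* In outline (summarizing the calls below): discover_nonconstant_array_refs
   and expand_used_vars prepare the variables; expand_function_start emits
   the entry-side setup; each basic block is then expanded in place by
   expand_gimple_basic_block; construct_init_block and construct_exit_block
   wrap the result; and find_many_sub_basic_blocks rebuilds whatever
   sub-block structure the expanders introduced.  */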

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  return 0;
}

struct tree_opt_pass pass_expand =
{
  "expand",				/* name */
  NULL,                                 /* gate */
  tree_expand_cfg,			/* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'r'					/* letter */
};