/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"

/* Verify that exactly one jump instruction has been emitted since LAST,
   and attach a REG_BR_PROB note to it specifying PROBABILITY.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them redistribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
static void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit a condjump-around-jump sequence when we
	   don't know how to reverse the conditional.  Special case this.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !NEXT_INSN (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	REG_NOTES (last)
	  = gen_rtx_EXPR_LIST (REG_BR_PROB,
			       GEN_INT (REG_BR_PROB_BASE - probability),
			       REG_NOTES (last));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}


#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    {
      warning (0, "ignoring alignment for stack allocated %q+D", decl);
      align = PREFERRED_STACK_BOUNDARY;
    }
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
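
/* Illustrative example (hypothetical numbers, frame_phase == 0): with
   FRAME_GROWS_DOWNWARD, frame_offset == 0, size == 12 and align == 8,
   new_frame_offset becomes -12 and is then rounded down to -16 by the
   mask with -align, so the object occupies bytes [-16, -4) and the new
   frame_offset is -16.  */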

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
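
/* For illustration: the linear layout enumerates the lower triangle
   row by row, so (0,0) (1,0) (1,1) (2,0) (2,1) (2,2) map to indices
   0 through 5; triangular_index (1, 2) swaps its arguments to (2, 1)
   and yields 2*3/2 + 1 == 4.  */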

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures that contain unions.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that accesses to
   the same memory through two overlapping union temporaries
   { short s; int i; } via different types may not alias, and happily
   reorders stores across the life-time boundaries of the temporaries
   (see PR25654).  We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be
   non-conflicting.  Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the
   end of the partitioning process we'll have a nice block that is easy
   to lay out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  if (flag_stack_shuffle)
    {
      /* Fisher-Yates shuffle.  */
      for (si = n - 1; si > 0; si--)
	{
	  size_t tmp;
	  sj = arc4random_uniform (si + 1);

	  tmp = stack_vars_sorted[si];
	  stack_vars_sorted[si] = stack_vars_sorted[sj];
	  stack_vars_sorted[sj] = tmp;
	}
    }
  else
    qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;

	  /* Advance past the object just placed; this is the "O += size(B)"
	     step of the algorithm documented above, without which OFFSET
	     would never move and the alignment refinement would be dead.  */
	  offset += jsize;
	}
    }
}
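
/* For illustration (hypothetical objects, no conflicts anywhere): with
   objects of sizes 4, 4, 8 and 16, the loop above first merges the two
   4-byte objects, then folds that partition into the 8-byte object, and
   finally folds everything into the 16-byte object, so all four
   variables share a single 16-byte stack slot.  */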

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  /* Print each member's own offset, not the representative's.  */
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
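
/* For illustration: OFFSET & -OFFSET isolates the lowest set bit, i.e.
   the largest power of two dividing OFFSET.  A phase-adjusted offset of
   24 gives 24 & -24 == 8, i.e. a known alignment of 8 bytes (64 bits
   after the BITS_PER_UNIT scaling); an offset of 0 yields 0, hence the
   align == 0 fallback to STACK_BOUNDARY.  */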

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 stack_vars[j].offset + offset);
    }
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode, all static variables are expanded at the
     end of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
	 be pointers.  Do not trust compiler generated temporaries:
	 our type system is totally busted as it relates to pointer
	 arithmetic, which translates into lots of compiler generated
	 objects with pointer types, but which are not really
	 pointers.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
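
/* For illustration: at -O0 without -fstack-protector, a 16-byte struct
   falls under the "small" cutoff above and is allocated immediately,
   while a 64-byte array is deferred and joins STACK_VARS for
   coalescing; at -O2 even toplevel variables are deferred so that they
   can be packed after sorting.  */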

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
	/* Force local static variables to be output when marked by
	   used attribute.  For unit-at-a-time, cgraph code already takes
	   care of this.  */
	|| (!flag_unit_at_a_time && TREE_STATIC (t)
	    && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables in the contained sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
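
/* For illustration (assuming the default --param ssp-buffer-size=8):
   "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a
   struct containing the latter additionally sets SPCT_HAS_AGGREGATE.  */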

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (bool protect)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  if (protect)
    cfun->stack_protect_guard = guard;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if ((TREE_CODE (field_type) == RECORD_TYPE
	     || TREE_CODE (field_type) == UNION_TYPE
	     || TREE_CODE (field_type) == QUAL_UNION_TYPE)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
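
  /* For illustration (hypothetical target values): with a
     PREFERRED_STACK_BOUNDARY of 128 bits (align == 16 bytes) and
     STARTING_FRAME_OFFSET == 8, off is 8 and frame_phase becomes 8;
     on the common targets where STARTING_FRAME_OFFSET is 0,
     frame_phase is simply 0.  */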

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
      {
	tree var = TREE_VALUE (t);
	if (!is_global_var (var))
	  {
	    tree var_type = TREE_TYPE (var);
	    /* Examine local referenced variables that have their
	       addresses taken, contain an array, or are arrays.  */
	    if (TREE_CODE (var) == VAR_DECL
		&& (TREE_CODE (var_type) == ARRAY_TYPE
		    || TREE_ADDRESSABLE (var)
		    || ((TREE_CODE (var_type) == RECORD_TYPE
			 || TREE_CODE (var_type) == UNION_TYPE
			 || TREE_CODE (var_type) == QUAL_UNION_TYPE)
			&& record_or_union_type_has_array_p (var_type))))
	      {
		gen_stack_protect_signal = true;
		break;
	      }
	  }
      }

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard (true);
      break;

    case SPCT_FLAG_STRONG:
      create_stack_guard (gen_stack_protect_signal
			  || current_function_calls_alloca
			  || has_protected_decls);
      break;

    case SPCT_FLAG_DEFAULT:
      create_stack_guard (current_function_calls_alloca
			  || has_protected_decls);
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      if (!flag_stack_shuffle)
	{
	  /* Reorder decls to be protected by iterating over the variables
	     array multiple times, and allocating out of each phase in turn.  */
	  /* ??? We could probably integrate this into the qsort we did
	     earlier, such that we naturally see these variables first,
	     and thus naturally allocate things in the right order.  */
	  if (has_protected_decls)
	    {
	      /* Phase 1 contains only character arrays.  */
	      expand_stack_vars (stack_protect_decl_phase_1);

	      /* Phase 2 contains other kinds of arrays.  Phase 2 decls
		 only exist for SPCT_FLAG_ALL and SPCT_FLAG_STRONG; see
		 stack_protect_decl_phase.  */
	      if (flag_stack_protect == SPCT_FLAG_ALL
		  || flag_stack_protect == SPCT_FLAG_STRONG)
		expand_stack_vars (stack_protect_decl_phase_2);
	    }
	}

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
	emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
	emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }
  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
	      && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if; the
	 sibcall sits in the then-branch, and a label begins the
	 else-branch.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n;; Generating RTL for tree basic block %d\n",
	       bb->index);
    }

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}


/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == NOP_EXPR
	     || TREE_CODE (t) == CONVERT_EXPR)
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
		   NULL, NULL);
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  return 0;
}

struct tree_opt_pass pass_expand =
{
  "expand",				/* name */
  NULL,					/* gate */
  tree_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  'r'					/* letter */
};