1/* A pass for lowering trees to RTL.
2   Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "rtl.h"
25#include "hard-reg-set.h"
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "wide-int.h"
34#include "inchash.h"
35#include "tree.h"
36#include "fold-const.h"
37#include "stringpool.h"
38#include "varasm.h"
39#include "stor-layout.h"
40#include "stmt.h"
41#include "print-tree.h"
42#include "tm_p.h"
43#include "predict.h"
44#include "hashtab.h"
45#include "function.h"
46#include "dominance.h"
47#include "cfg.h"
48#include "cfgrtl.h"
49#include "cfganal.h"
50#include "cfgbuild.h"
51#include "cfgcleanup.h"
52#include "basic-block.h"
53#include "insn-codes.h"
54#include "optabs.h"
55#include "flags.h"
56#include "statistics.h"
57#include "real.h"
58#include "fixed-value.h"
59#include "insn-config.h"
60#include "expmed.h"
61#include "dojump.h"
62#include "explow.h"
63#include "calls.h"
64#include "emit-rtl.h"
65#include "expr.h"
66#include "langhooks.h"
67#include "bitmap.h"
68#include "tree-ssa-alias.h"
69#include "internal-fn.h"
70#include "tree-eh.h"
71#include "gimple-expr.h"
72#include "is-a.h"
73#include "gimple.h"
74#include "gimple-iterator.h"
75#include "gimple-walk.h"
76#include "gimple-ssa.h"
77#include "hash-map.h"
78#include "plugin-api.h"
79#include "ipa-ref.h"
80#include "cgraph.h"
81#include "tree-cfg.h"
82#include "tree-phinodes.h"
83#include "ssa-iterators.h"
84#include "tree-ssanames.h"
85#include "tree-dfa.h"
86#include "tree-ssa.h"
87#include "tree-pass.h"
88#include "except.h"
89#include "diagnostic.h"
90#include "gimple-pretty-print.h"
91#include "toplev.h"
92#include "debug.h"
93#include "params.h"
94#include "tree-inline.h"
95#include "value-prof.h"
96#include "target.h"
97#include "tree-ssa-live.h"
98#include "tree-outof-ssa.h"
99#include "sbitmap.h"
100#include "cfgloop.h"
101#include "regs.h" /* For reg_renumber.  */
102#include "insn-attr.h" /* For INSN_SCHEDULING.  */
103#include "asan.h"
104#include "tree-ssa-address.h"
105#include "recog.h"
106#include "output.h"
107#include "builtins.h"
108#include "tree-chkp.h"
109#include "rtl-chkp.h"
110
111/* Some systems use __main in a way incompatible with its use in GCC; in these
112   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
113   give the same symbol without quotes for an alternative entry point.  You
114   must define both, or neither.  */
115#ifndef NAME__MAIN
116#define NAME__MAIN "__main"
117#endif
118
119/* This variable holds information helping the rewriting of SSA trees
120   into RTL.  */
121struct ssaexpand SA;
122
123/* This variable holds the currently expanded gimple statement for purposes
124   of communicating the profile info to the builtin expanders.  */
125gimple currently_expanding_gimple_stmt;
126
127static rtx expand_debug_expr (tree);
128
129/* Return an expression tree corresponding to the RHS of GIMPLE
130   statement STMT.  */
131
132tree
133gimple_assign_rhs_to_tree (gimple stmt)
134{
135  tree t;
136  enum gimple_rhs_class grhs_class;
137
138  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
139
140  if (grhs_class == GIMPLE_TERNARY_RHS)
141    t = build3 (gimple_assign_rhs_code (stmt),
142		TREE_TYPE (gimple_assign_lhs (stmt)),
143		gimple_assign_rhs1 (stmt),
144		gimple_assign_rhs2 (stmt),
145		gimple_assign_rhs3 (stmt));
146  else if (grhs_class == GIMPLE_BINARY_RHS)
147    t = build2 (gimple_assign_rhs_code (stmt),
148		TREE_TYPE (gimple_assign_lhs (stmt)),
149		gimple_assign_rhs1 (stmt),
150		gimple_assign_rhs2 (stmt));
151  else if (grhs_class == GIMPLE_UNARY_RHS)
152    t = build1 (gimple_assign_rhs_code (stmt),
153		TREE_TYPE (gimple_assign_lhs (stmt)),
154		gimple_assign_rhs1 (stmt));
155  else if (grhs_class == GIMPLE_SINGLE_RHS)
156    {
157      t = gimple_assign_rhs1 (stmt);
158      /* Avoid modifying this tree in place below.  */
159      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
160	   && gimple_location (stmt) != EXPR_LOCATION (t))
161	  || (gimple_block (stmt)
162	      && currently_expanding_to_rtl
163	      && EXPR_P (t)))
164	t = copy_node (t);
165    }
166  else
167    gcc_unreachable ();
168
169  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
170    SET_EXPR_LOCATION (t, gimple_location (stmt));
171
172  return t;
173}
174
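/* As an illustration (not part of the pass itself): for a GIMPLE assignment
   such as "x = y + z" the expression code is PLUS_EXPR, whose RHS class is
   GIMPLE_BINARY_RHS, so the function above effectively rebuilds the
   expression as

     build2 (PLUS_EXPR, TREE_TYPE (x), y, z);

   where x, y and z stand for the LHS tree and the two RHS operand trees.  */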
175
176#ifndef STACK_ALIGNMENT_NEEDED
177#define STACK_ALIGNMENT_NEEDED 1
178#endif
179
180#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
181
182/* Associate declaration T with storage space X.  If T is not an
183   SSA name this is exactly SET_DECL_RTL; otherwise make the
184   partition of T associated with X.  */
185static inline void
186set_rtl (tree t, rtx x)
187{
188  if (TREE_CODE (t) == SSA_NAME)
189    {
190      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
191      if (x && !MEM_P (x))
192	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
193      /* For the benefit of debug information at -O0 (where vartracking
194         doesn't run) record the place also in the base DECL if it's
195	 a normal variable (not a parameter).  */
196      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
197	{
198	  tree var = SSA_NAME_VAR (t);
199	  /* If we don't yet have something recorded, just record it now.  */
200	  if (!DECL_RTL_SET_P (var))
201	    SET_DECL_RTL (var, x);
202	  /* If we have it set already to "multiple places" don't
203	     change this.  */
204	  else if (DECL_RTL (var) == pc_rtx)
205	    ;
206	  /* If we have something recorded and it's not the same place
207	     as we want to record now, we have multiple partitions for the
208	     same base variable, with different places.  We can't just
209	     randomly choose one, hence we have to say that we don't know.
210	     This only happens with optimization, and there var-tracking
211	     will figure out the right thing.  */
212	  else if (DECL_RTL (var) != x)
213	    SET_DECL_RTL (var, pc_rtx);
214	}
215    }
216  else
217    SET_DECL_RTL (t, x);
218}
219
220/* This structure holds data relevant to one variable that will be
221   placed in a stack slot.  */
222struct stack_var
223{
224  /* The variable.  */
225  tree decl;
226
227  /* Initially, the size of the variable.  Later, the size of the partition,
228     if this variable becomes its partition's representative.  */
229  HOST_WIDE_INT size;
230
231  /* The *byte* alignment required for this variable.  Or, as with the
232     size, the alignment for this partition.  */
233  unsigned int alignb;
234
235  /* The partition representative.  */
236  size_t representative;
237
238  /* The next stack variable in the partition, or EOC.  */
239  size_t next;
240
241  /* The indices of conflicting stack variables.  */
242  bitmap conflicts;
243};
244
245#define EOC  ((size_t)-1)
246
247/* We have an array of such objects while deciding allocation.  */
248static struct stack_var *stack_vars;
249static size_t stack_vars_alloc;
250static size_t stack_vars_num;
251static hash_map<tree, size_t> *decl_to_stack_part;
252
253/* Conflict bitmaps go on this obstack.  This allows us to destroy
254   all of them in one big sweep.  */
255static bitmap_obstack stack_var_bitmap_obstack;
256
257/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
258   is non-decreasing.  */
259static size_t *stack_vars_sorted;
260
261/* The phase of the stack frame.  This is the known misalignment of
262   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
263   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
264static int frame_phase;
265
266/* Used during expand_used_vars to remember if we saw any decls for
267   which we'd like to enable stack smashing protection.  */
268static bool has_protected_decls;
269
270/* Used during expand_used_vars.  Remember if we saw a character buffer
271   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
272static bool has_short_buffer;
273
274/* Compute the byte alignment to use for DECL.  Ignore alignment
275   we can't satisfy with the expected alignment of the stack boundary.  */
276
277static unsigned int
278align_local_variable (tree decl)
279{
280  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
281  DECL_ALIGN (decl) = align;
282  return align / BITS_PER_UNIT;
283}
284
285/* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
286   round down otherwise.  Return the aligned BASE value.  */
287
288static inline unsigned HOST_WIDE_INT
289align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
290{
291  return align_up ? (base + align - 1) & -align : base & -align;
292}
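
/* An illustrative, standalone sketch (not used by the compiler) of the
   rounding trick above: it assumes ALIGN is a power of two, so that -ALIGN
   is a mask with the low bits clear.  For instance, with BASE == 13 and
   ALIGN == 8, rounding up gives (13 + 7) & -8 == 16 and rounding down
   gives 13 & -8 == 8.  The helper names below are hypothetical, for
   exposition only.  */
#if 0
static unsigned long
example_align_up (unsigned long base, unsigned long align)
{
  /* Round BASE up to the next multiple of ALIGN (a power of two).  */
  return (base + align - 1) & -align;
}

static unsigned long
example_align_down (unsigned long base, unsigned long align)
{
  /* Round BASE down to a multiple of ALIGN (a power of two).  */
  return base & -align;
}
#endif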
293
294/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
295   Return the frame offset.  */
296
297static HOST_WIDE_INT
298alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
299{
300  HOST_WIDE_INT offset, new_frame_offset;
301
302  if (FRAME_GROWS_DOWNWARD)
303    {
304      new_frame_offset
305	= align_base (frame_offset - frame_phase - size,
306		      align, false) + frame_phase;
307      offset = new_frame_offset;
308    }
309  else
310    {
311      new_frame_offset
312	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
313      offset = new_frame_offset;
314      new_frame_offset += size;
315    }
316  frame_offset = new_frame_offset;
317
318  if (frame_offset_overflow (frame_offset, cfun->decl))
319    frame_offset = offset = 0;
320
321  return offset;
322}
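
/* A worked example of the allocation above, assuming frame_phase == 0:
   with FRAME_GROWS_DOWNWARD, frame_offset == -16 and a request for
   SIZE == 12 at ALIGN == 8, the new frame_offset is
   align_base (-16 - 12, 8, false) == -32, and -32 is returned as the
   offset of the new object.  In the upward-growing case, frame_offset == 16
   with the same request gives an object offset of align_base (16, 8, true)
   == 16 and a new frame_offset of 28.  */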
323
324/* Accumulate DECL into STACK_VARS.  */
325
326static void
327add_stack_var (tree decl)
328{
329  struct stack_var *v;
330
331  if (stack_vars_num >= stack_vars_alloc)
332    {
333      if (stack_vars_alloc)
334	stack_vars_alloc = stack_vars_alloc * 3 / 2;
335      else
336	stack_vars_alloc = 32;
337      stack_vars
338	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
339    }
340  if (!decl_to_stack_part)
341    decl_to_stack_part = new hash_map<tree, size_t>;
342
343  v = &stack_vars[stack_vars_num];
344  decl_to_stack_part->put (decl, stack_vars_num);
345
346  v->decl = decl;
347  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
348  /* Ensure that all variables have size, so that &a != &b for any two
349     variables that are simultaneously live.  */
350  if (v->size == 0)
351    v->size = 1;
352  v->alignb = align_local_variable (SSAVAR (decl));
353  /* An alignment of zero can mightily confuse us later.  */
354  gcc_assert (v->alignb != 0);
355
356  /* All variables are initially in their own partition.  */
357  v->representative = stack_vars_num;
358  v->next = EOC;
359
360  /* All variables initially conflict with no other.  */
361  v->conflicts = NULL;
362
363  /* Ensure that this decl doesn't get put onto the list twice.  */
364  set_rtl (decl, pc_rtx);
365
366  stack_vars_num++;
367}
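
/* An illustrative sketch, not used by the compiler: once partitions have
   been formed, their members are chained through the NEXT fields set up
   above, starting at the representative and terminated by EOC.  Walking a
   partition from its representative REPR therefore looks like this (the
   same idiom appears in dump_stack_var_partition and expand_stack_vars
   below; the helper name is hypothetical):  */
#if 0
static void
example_walk_partition (size_t repr)
{
  size_t j;
  for (j = repr; j != EOC; j = stack_vars[j].next)
    {
      /* stack_vars[j].decl is one member of REPR's partition.  */
    }
}
#endif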
368
369/* Make the decls associated with indices X and Y conflict.  */
370
371static void
372add_stack_var_conflict (size_t x, size_t y)
373{
374  struct stack_var *a = &stack_vars[x];
375  struct stack_var *b = &stack_vars[y];
376  if (!a->conflicts)
377    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
378  if (!b->conflicts)
379    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
380  bitmap_set_bit (a->conflicts, y);
381  bitmap_set_bit (b->conflicts, x);
382}
383
384/* Check whether the decls associated with indices X and Y conflict.  */
385
386static bool
387stack_var_conflict_p (size_t x, size_t y)
388{
389  struct stack_var *a = &stack_vars[x];
390  struct stack_var *b = &stack_vars[y];
391  if (x == y)
392    return false;
393  /* Partitions containing an SSA name result from gimple registers
394     with things like unsupported modes.  They are top-level and
395     hence conflict with everything else.  */
396  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
397    return true;
398
399  if (!a->conflicts || !b->conflicts)
400    return false;
401  return bitmap_bit_p (a->conflicts, y);
402}
403
404/* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
405   by add_stack_var, enter its partition number into bitmap DATA.  */
406
407static bool
408visit_op (gimple, tree op, tree, void *data)
409{
410  bitmap active = (bitmap)data;
411  op = get_base_address (op);
412  if (op
413      && DECL_P (op)
414      && DECL_RTL_IF_SET (op) == pc_rtx)
415    {
416      size_t *v = decl_to_stack_part->get (op);
417      if (v)
418	bitmap_set_bit (active, *v);
419    }
420  return false;
421}
422
423/* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched
424   by add_stack_var, record conflicts between it and all currently active
425   other partitions from bitmap DATA.  */
426
427static bool
428visit_conflict (gimple, tree op, tree, void *data)
429{
430  bitmap active = (bitmap)data;
431  op = get_base_address (op);
432  if (op
433      && DECL_P (op)
434      && DECL_RTL_IF_SET (op) == pc_rtx)
435    {
436      size_t *v = decl_to_stack_part->get (op);
437      if (v && bitmap_set_bit (active, *v))
438	{
439	  size_t num = *v;
440	  bitmap_iterator bi;
441	  unsigned i;
442	  gcc_assert (num < stack_vars_num);
443	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
444	    add_stack_var_conflict (num, i);
445	}
446    }
447  return false;
448}
449
450/* Helper routine for add_scope_conflicts, calculating the active partitions
451   at the end of BB, leaving the result in WORK.  We're called to generate
452   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
453   liveness.  */
454
455static void
456add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
457{
458  edge e;
459  edge_iterator ei;
460  gimple_stmt_iterator gsi;
461  walk_stmt_load_store_addr_fn visit;
462
463  bitmap_clear (work);
464  FOR_EACH_EDGE (e, ei, bb->preds)
465    bitmap_ior_into (work, (bitmap)e->src->aux);
466
467  visit = visit_op;
468
469  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
470    {
471      gimple stmt = gsi_stmt (gsi);
472      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
473    }
474  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
475    {
476      gimple stmt = gsi_stmt (gsi);
477
478      if (gimple_clobber_p (stmt))
479	{
480	  tree lhs = gimple_assign_lhs (stmt);
481	  size_t *v;
482	  /* Nested function lowering might introduce LHSs
483	     that are COMPONENT_REFs.  */
484	  if (TREE_CODE (lhs) != VAR_DECL)
485	    continue;
486	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
487	      && (v = decl_to_stack_part->get (lhs)))
488	    bitmap_clear_bit (work, *v);
489	}
490      else if (!is_gimple_debug (stmt))
491	{
492	  if (for_conflict
493	      && visit == visit_op)
494	    {
495	      /* If this is the first real instruction in this BB we need
496	         to add conflicts for everything live at this point now.
497		 Unlike classical liveness for named objects we can't
498		 rely on seeing a def/use of the names we're interested in.
499		 There might merely be indirect loads/stores.  We would otherwise
500		 not add any conflicts for such partitions.  */
501	      bitmap_iterator bi;
502	      unsigned i;
503	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
504		{
505		  struct stack_var *a = &stack_vars[i];
506		  if (!a->conflicts)
507		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
508		  bitmap_ior_into (a->conflicts, work);
509		}
510	      visit = visit_conflict;
511	    }
512	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
513	}
514    }
515}
516
517/* Generate stack partition conflicts between all partitions that are
518   simultaneously live.  */
519
520static void
521add_scope_conflicts (void)
522{
523  basic_block bb;
524  bool changed;
525  bitmap work = BITMAP_ALLOC (NULL);
526  int *rpo;
527  int n_bbs;
528
529  /* We approximate the live range of a stack variable by taking the first
530     mention of its name as starting point(s), and by the end-of-scope
531     death clobber added by gimplify as ending point(s) of the range.
532     This overapproximates in cases where we, for instance, moved an
533     address-taken operation upward without also moving a dereference of it
534     upward.  But it's conservatively correct, as a variable can never hold
535     values before its name is mentioned at least once.
536
537     We then do a mostly classical bitmap liveness algorithm.  */
538
539  FOR_ALL_BB_FN (bb, cfun)
540    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
541
542  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
543  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
544
545  changed = true;
546  while (changed)
547    {
548      int i;
549      changed = false;
550      for (i = 0; i < n_bbs; i++)
551	{
552	  bitmap active;
553	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
554	  active = (bitmap)bb->aux;
555	  add_scope_conflicts_1 (bb, work, false);
556	  if (bitmap_ior_into (active, work))
557	    changed = true;
558	}
559    }
560
561  FOR_EACH_BB_FN (bb, cfun)
562    add_scope_conflicts_1 (bb, work, true);
563
564  free (rpo);
565  BITMAP_FREE (work);
566  FOR_ALL_BB_FN (bb, cfun)
567    BITMAP_FREE (bb->aux);
568}
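
/* The iteration above is a standard forward dataflow fixpoint.  Writing
   OUT[BB] for the per-block bitmap stored in BB->aux and "transfer" for
   add_scope_conflicts_1, it is roughly:

	changed = true
	while (changed)
	  changed = false
	  for each block BB in reverse post-order
	    WORK = union of OUT[P] over all predecessors P of BB
	    apply BB's transfer function to WORK
	    if OR-ing WORK into OUT[BB] added new bits
	      changed = true

   Visiting blocks in reverse post-order means most predecessors are
   processed before their successors, so few iterations are needed.  */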
569
570/* A subroutine of partition_stack_vars.  A comparison function for qsort,
571   sorting an array of indices by the properties of the object.  */
572
573static int
574stack_var_cmp (const void *a, const void *b)
575{
576  size_t ia = *(const size_t *)a;
577  size_t ib = *(const size_t *)b;
578  unsigned int aligna = stack_vars[ia].alignb;
579  unsigned int alignb = stack_vars[ib].alignb;
580  HOST_WIDE_INT sizea = stack_vars[ia].size;
581  HOST_WIDE_INT sizeb = stack_vars[ib].size;
582  tree decla = stack_vars[ia].decl;
583  tree declb = stack_vars[ib].decl;
584  bool largea, largeb;
585  unsigned int uida, uidb;
586
587  /* Primary compare on "large" alignment.  Large comes first.  */
588  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
589  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
590  if (largea != largeb)
591    return (int)largeb - (int)largea;
592
593  /* Secondary compare on size, decreasing.  */
594  if (sizea > sizeb)
595    return -1;
596  if (sizea < sizeb)
597    return 1;
598
599  /* Tertiary compare on true alignment, decreasing.  */
600  if (aligna < alignb)
601    return -1;
602  if (aligna > alignb)
603    return 1;
604
605  /* Final compare on ID for sort stability, increasing.
606     Two SSA names are compared by their version, SSA names come before
607     non-SSA names, and two normal decls are compared by their DECL_UID.  */
608  if (TREE_CODE (decla) == SSA_NAME)
609    {
610      if (TREE_CODE (declb) == SSA_NAME)
611	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
612      else
613	return -1;
614    }
615  else if (TREE_CODE (declb) == SSA_NAME)
616    return 1;
617  else
618    uida = DECL_UID (decla), uidb = DECL_UID (declb);
619  if (uida < uidb)
620    return 1;
621  if (uida > uidb)
622    return -1;
623  return 0;
624}
625
626struct part_traits : default_hashmap_traits
627{
628  template<typename T>
629    static bool
630    is_deleted (T &e)
631    { return e.m_value == reinterpret_cast<void *> (1); }
632
633  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
634  template<typename T>
635    static void
636    mark_deleted (T &e)
637    { e.m_value = reinterpret_cast<T> (1); }
638
639  template<typename T>
640    static void
641    mark_empty (T &e)
642      { e.m_value = NULL; }
643};
644
645typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
646
647/* If the points-to solution *PT points to variables that are in a partition
648   together with other variables, add all partition members to the pointed-to
649   variables bitmap.  */
650
651static void
652add_partitioned_vars_to_ptset (struct pt_solution *pt,
653			       part_hashmap *decls_to_partitions,
654			       hash_set<bitmap> *visited, bitmap temp)
655{
656  bitmap_iterator bi;
657  unsigned i;
658  bitmap *part;
659
660  if (pt->anything
661      || pt->vars == NULL
662      /* The pointed-to vars bitmap is shared, it is enough to
663	 visit it once.  */
664      || visited->add (pt->vars))
665    return;
666
667  bitmap_clear (temp);
668
669  /* By using a temporary bitmap to store all members of the partitions
670     we have to add, we make sure to visit each of the partitions only
671     once.  */
672  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
673    if ((!temp
674	 || !bitmap_bit_p (temp, i))
675	&& (part = decls_to_partitions->get (i)))
676      bitmap_ior_into (temp, *part);
677  if (!bitmap_empty_p (temp))
678    bitmap_ior_into (pt->vars, temp);
679}
680
681/* Update points-to sets based on partition info, so we can use them on RTL.
682   The bitmaps representing stack partitions will be saved until expand,
683   where partitioned decls used as bases in memory expressions will be
684   rewritten.  */
685
686static void
687update_alias_info_with_stack_vars (void)
688{
689  part_hashmap *decls_to_partitions = NULL;
690  size_t i, j;
691  tree var = NULL_TREE;
692
693  for (i = 0; i < stack_vars_num; i++)
694    {
695      bitmap part = NULL;
696      tree name;
697      struct ptr_info_def *pi;
698
699      /* Not interested in partitions with a single variable.  */
700      if (stack_vars[i].representative != i
701          || stack_vars[i].next == EOC)
702        continue;
703
704      if (!decls_to_partitions)
705	{
706	  decls_to_partitions = new part_hashmap;
707	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
708	}
709
710      /* Create an SSA_NAME that points to the partition for use
711         as base during alias-oracle queries on RTL for bases that
712	 have been partitioned.  */
713      if (var == NULL_TREE)
714	var = create_tmp_var (ptr_type_node);
715      name = make_ssa_name (var);
716
717      /* Create bitmaps representing partitions.  They will be used for
718         points-to sets later, so use GGC alloc.  */
719      part = BITMAP_GGC_ALLOC ();
720      for (j = i; j != EOC; j = stack_vars[j].next)
721	{
722	  tree decl = stack_vars[j].decl;
723	  unsigned int uid = DECL_PT_UID (decl);
724	  bitmap_set_bit (part, uid);
725	  decls_to_partitions->put (uid, part);
726	  cfun->gimple_df->decls_to_pointers->put (decl, name);
727	  if (TREE_ADDRESSABLE (decl))
728	    TREE_ADDRESSABLE (name) = 1;
729	}
730
731      /* Make the SSA name point to all partition members.  */
732      pi = get_ptr_info (name);
733      pt_solution_set (&pi->pt, part, false);
734    }
735
736  /* Make all points-to sets that contain one member of a partition
737     contain all members of the partition.  */
738  if (decls_to_partitions)
739    {
740      unsigned i;
741      hash_set<bitmap> visited;
742      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
743
744      for (i = 1; i < num_ssa_names; i++)
745	{
746	  tree name = ssa_name (i);
747	  struct ptr_info_def *pi;
748
749	  if (name
750	      && POINTER_TYPE_P (TREE_TYPE (name))
751	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
752	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
753					   &visited, temp);
754	}
755
756      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
757				     decls_to_partitions, &visited, temp);
758
759      delete decls_to_partitions;
760      BITMAP_FREE (temp);
761    }
762}
763
764/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
765   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
766   Merge them into a single partition A.  */
767
768static void
769union_stack_vars (size_t a, size_t b)
770{
771  struct stack_var *vb = &stack_vars[b];
772  bitmap_iterator bi;
773  unsigned u;
774
775  gcc_assert (stack_vars[b].next == EOC);
776  /* Add B to A's partition.  */
777  stack_vars[b].next = stack_vars[a].next;
778  stack_vars[b].representative = a;
779  stack_vars[a].next = b;
780
781  /* Update the required alignment of partition A to account for B.  */
782  if (stack_vars[a].alignb < stack_vars[b].alignb)
783    stack_vars[a].alignb = stack_vars[b].alignb;
784
785  /* Update the interference graph and merge the conflicts.  */
786  if (vb->conflicts)
787    {
788      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
789	add_stack_var_conflict (a, stack_vars[u].representative);
790      BITMAP_FREE (vb->conflicts);
791    }
792}
793
794/* A subroutine of expand_used_vars.  Binpack the variables into
795   partitions constrained by the interference graph.  The overall
796   algorithm used is as follows:
797
798	Sort the objects by size in descending order.
799	For each object A {
800	  loop {
801	    Look for the largest non-conflicting object B, not yet placed
802	    in a partition, with size <= size(A).
803	    UNION (A, B)
804	  }
805	}
806
807*/
808
809static void
810partition_stack_vars (void)
811{
812  size_t si, sj, n = stack_vars_num;
813
814  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
815  for (si = 0; si < n; ++si)
816    stack_vars_sorted[si] = si;
817
818  if (n == 1)
819    return;
820
821  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
822
823  for (si = 0; si < n; ++si)
824    {
825      size_t i = stack_vars_sorted[si];
826      unsigned int ialign = stack_vars[i].alignb;
827      HOST_WIDE_INT isize = stack_vars[i].size;
828
829      /* Ignore objects that aren't partition representatives. If we
830         see a var that is not a partition representative, it must
831         have been merged earlier.  */
832      if (stack_vars[i].representative != i)
833        continue;
834
835      for (sj = si + 1; sj < n; ++sj)
836	{
837	  size_t j = stack_vars_sorted[sj];
838	  unsigned int jalign = stack_vars[j].alignb;
839	  HOST_WIDE_INT jsize = stack_vars[j].size;
840
841	  /* Ignore objects that aren't partition representatives.  */
842	  if (stack_vars[j].representative != j)
843	    continue;
844
845	  /* Do not mix objects of "small" (supported) alignment
846	     and "large" (unsupported) alignment.  */
847	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
848	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
849	    break;
850
851	  /* For Address Sanitizer do not mix objects with different
852	     sizes, as the shorter vars wouldn't be adequately protected.
853	     Don't do that for "large" (unsupported) alignment objects,
854	     those aren't protected anyway.  */
855	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
856	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
857	    break;
858
859	  /* Ignore conflicting objects.  */
860	  if (stack_var_conflict_p (i, j))
861	    continue;
862
863	  /* UNION the objects; J joins I's partition.  */
864	  union_stack_vars (i, j);
865	}
866    }
867
868  update_alias_info_with_stack_vars ();
869}
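
/* A small worked example of the greedy packing above: suppose the sorted
   representatives are A (32 bytes), B (16), C (16) and D (8), and the only
   conflicts are A-B and B-C (i.e. B is live at the same time as A and C).
   Starting from A we skip B (conflict), union C into A's partition, then
   union D as well; when the loop reaches B, C and D are no longer
   representatives, so B stays alone.  The result is the partitions
   {A, C, D} and {B}; expand_stack_vars later places all members of a
   partition at the same frame offset, so A, C and D share one stack slot
   while B gets its own.  */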
870
871/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
872
873static void
874dump_stack_var_partition (void)
875{
876  size_t si, i, j, n = stack_vars_num;
877
878  for (si = 0; si < n; ++si)
879    {
880      i = stack_vars_sorted[si];
881
882      /* Skip variables that aren't partition representatives, for now.  */
883      if (stack_vars[i].representative != i)
884	continue;
885
886      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
887	       " align %u\n", (unsigned long) i, stack_vars[i].size,
888	       stack_vars[i].alignb);
889
890      for (j = i; j != EOC; j = stack_vars[j].next)
891	{
892	  fputc ('\t', dump_file);
893	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
894	}
895      fputc ('\n', dump_file);
896    }
897}
898
899/* Assign rtl to DECL at BASE + OFFSET.  */
900
901static void
902expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
903			 HOST_WIDE_INT offset)
904{
905  unsigned align;
906  rtx x;
907
908  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
909  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
910
911  x = plus_constant (Pmode, base, offset);
912  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
913
914  if (TREE_CODE (decl) != SSA_NAME)
915    {
916      /* Set the alignment we actually gave this decl if it isn't an SSA name.
917         If it is, we generate stack slots only accidentally so it isn't as
918	 important; we'll simply use the alignment that is already set.  */
919      if (base == virtual_stack_vars_rtx)
920	offset -= frame_phase;
921      align = offset & -offset;
922      align *= BITS_PER_UNIT;
923      if (align == 0 || align > base_align)
924	align = base_align;
925
926      /* One would think that we could assert that we're not decreasing
927	 alignment here, but (at least) the i386 port does exactly this
928	 via the MINIMUM_ALIGNMENT hook.  */
929
930      DECL_ALIGN (decl) = align;
931      DECL_USER_ALIGN (decl) = 0;
932    }
933
934  set_mem_attributes (x, SSAVAR (decl), true);
935  set_rtl (decl, x);
936}
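
/* A note on the "offset & -offset" computation above: for a nonzero OFFSET
   it extracts the largest power of two dividing OFFSET, which is the
   alignment that BASE + OFFSET is guaranteed to have relative to BASE.
   For example, OFFSET == 24 or -24 gives 8, i.e. 64-bit alignment once
   multiplied by BITS_PER_UNIT on a byte-addressed target.  */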
937
938struct stack_vars_data
939{
940  /* Vector of offset pairs, always the end of some padding followed
941     by the start of the padding that needs Address Sanitizer protection.
942     The vector is in reversed order; highest offset pairs come first.  */
943  vec<HOST_WIDE_INT> asan_vec;
944
945  /* Vector of partition representative decls in between the paddings.  */
946  vec<tree> asan_decl_vec;
947
948  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
949  rtx asan_base;
950
951  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
952  unsigned int asan_alignb;
953};
954
955/* A subroutine of expand_used_vars.  Give each partition representative
956   a unique location within the stack frame.  Update each partition member
957   with that location.  */
958
959static void
960expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
961{
962  size_t si, i, j, n = stack_vars_num;
963  HOST_WIDE_INT large_size = 0, large_alloc = 0;
964  rtx large_base = NULL;
965  unsigned large_align = 0;
966  tree decl;
967
968  /* Determine if there are any variables requiring "large" alignment.
969     Since these are dynamically allocated, we only process these if
970     no predicate is involved.  */
971  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
972  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
973    {
974      /* Find the total size of these variables.  */
975      for (si = 0; si < n; ++si)
976	{
977	  unsigned alignb;
978
979	  i = stack_vars_sorted[si];
980	  alignb = stack_vars[i].alignb;
981
982	  /* All "large" alignment decls come before all "small" alignment
983	     decls, but "large" alignment decls are not sorted based on
984	     their alignment.  Increase large_align to track the largest
985	     required alignment.  */
986	  if ((alignb * BITS_PER_UNIT) > large_align)
987	    large_align = alignb * BITS_PER_UNIT;
988
989	  /* Stop when we get to the first decl with "small" alignment.  */
990	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
991	    break;
992
993	  /* Skip variables that aren't partition representatives.  */
994	  if (stack_vars[i].representative != i)
995	    continue;
996
997	  /* Skip variables that have already had rtl assigned.  See also
998	     add_stack_var where we perpetrate this pc_rtx hack.  */
999	  decl = stack_vars[i].decl;
1000	  if ((TREE_CODE (decl) == SSA_NAME
1001	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1002	      : DECL_RTL (decl)) != pc_rtx)
1003	    continue;
1004
1005	  large_size += alignb - 1;
1006	  large_size &= -(HOST_WIDE_INT)alignb;
1007	  large_size += stack_vars[i].size;
1008	}
1009
1010      /* If there were any, allocate space.  */
1011      if (large_size > 0)
1012	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1013						   large_align, true);
1014    }
1015
1016  for (si = 0; si < n; ++si)
1017    {
1018      rtx base;
1019      unsigned base_align, alignb;
1020      HOST_WIDE_INT offset;
1021
1022      i = stack_vars_sorted[si];
1023
1024      /* Skip variables that aren't partition representatives, for now.  */
1025      if (stack_vars[i].representative != i)
1026	continue;
1027
1028      /* Skip variables that have already had rtl assigned.  See also
1029	 add_stack_var where we perpetrate this pc_rtx hack.  */
1030      decl = stack_vars[i].decl;
1031      if ((TREE_CODE (decl) == SSA_NAME
1032	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1033	   : DECL_RTL (decl)) != pc_rtx)
1034	continue;
1035
1036      /* Check the predicate to see whether this variable should be
1037	 allocated in this pass.  */
1038      if (pred && !pred (i))
1039	continue;
1040
1041      alignb = stack_vars[i].alignb;
1042      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1043	{
1044	  base = virtual_stack_vars_rtx;
1045	  if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1046	    {
1047	      HOST_WIDE_INT prev_offset
1048		= align_base (frame_offset,
1049			      MAX (alignb, ASAN_RED_ZONE_SIZE),
1050			      FRAME_GROWS_DOWNWARD);
1051	      tree repr_decl = NULL_TREE;
1052	      offset
1053		= alloc_stack_frame_space (stack_vars[i].size
1054					   + ASAN_RED_ZONE_SIZE,
1055					   MAX (alignb, ASAN_RED_ZONE_SIZE));
1056
1057	      data->asan_vec.safe_push (prev_offset);
1058	      data->asan_vec.safe_push (offset + stack_vars[i].size);
1059	      /* Find the best representative of the partition.
1060		 Prefer those with DECL_NAME, and even better those
1061		 satisfying the asan_protect_stack_decl predicate.  */
1062	      for (j = i; j != EOC; j = stack_vars[j].next)
1063		if (asan_protect_stack_decl (stack_vars[j].decl)
1064		    && DECL_NAME (stack_vars[j].decl))
1065		  {
1066		    repr_decl = stack_vars[j].decl;
1067		    break;
1068		  }
1069		else if (repr_decl == NULL_TREE
1070			 && DECL_P (stack_vars[j].decl)
1071			 && DECL_NAME (stack_vars[j].decl))
1072		  repr_decl = stack_vars[j].decl;
1073	      if (repr_decl == NULL_TREE)
1074		repr_decl = stack_vars[i].decl;
1075	      data->asan_decl_vec.safe_push (repr_decl);
1076	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1077	      if (data->asan_base == NULL)
1078		data->asan_base = gen_reg_rtx (Pmode);
1079	      base = data->asan_base;
1080
1081	      if (!STRICT_ALIGNMENT)
1082		base_align = crtl->max_used_stack_slot_alignment;
1083	      else
1084		base_align = MAX (crtl->max_used_stack_slot_alignment,
1085				  GET_MODE_ALIGNMENT (SImode)
1086				  << ASAN_SHADOW_SHIFT);
1087	    }
1088	  else
1089	    {
1090	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1091	      base_align = crtl->max_used_stack_slot_alignment;
1092	    }
1093	}
1094      else
1095	{
1096	  /* Large alignment is only processed in the last pass.  */
1097	  if (pred)
1098	    continue;
1099	  gcc_assert (large_base != NULL);
1100
1101	  large_alloc += alignb - 1;
1102	  large_alloc &= -(HOST_WIDE_INT)alignb;
1103	  offset = large_alloc;
1104	  large_alloc += stack_vars[i].size;
1105
1106	  base = large_base;
1107	  base_align = large_align;
1108	}
1109
1110      /* Create rtl for each variable based on its location within the
1111	 partition.  */
1112      for (j = i; j != EOC; j = stack_vars[j].next)
1113	{
1114	  expand_one_stack_var_at (stack_vars[j].decl,
1115				   base, base_align,
1116				   offset);
1117	}
1118    }
1119
1120  gcc_assert (large_alloc == large_size);
1121}
1122
1123/* Take into account all sizes of partitions and reset DECL_RTLs.  */
1124static HOST_WIDE_INT
1125account_stack_vars (void)
1126{
1127  size_t si, j, i, n = stack_vars_num;
1128  HOST_WIDE_INT size = 0;
1129
1130  for (si = 0; si < n; ++si)
1131    {
1132      i = stack_vars_sorted[si];
1133
1134      /* Skip variables that aren't partition representatives, for now.  */
1135      if (stack_vars[i].representative != i)
1136	continue;
1137
1138      size += stack_vars[i].size;
1139      for (j = i; j != EOC; j = stack_vars[j].next)
1140	set_rtl (stack_vars[j].decl, NULL);
1141    }
1142  return size;
1143}
1144
1145/* A subroutine of expand_one_var.  Called to immediately assign rtl
1146   to a variable to be allocated in the stack frame.  */
1147
1148static void
1149expand_one_stack_var (tree var)
1150{
1151  HOST_WIDE_INT size, offset;
1152  unsigned byte_align;
1153
1154  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1155  byte_align = align_local_variable (SSAVAR (var));
1156
1157  /* We handle highly aligned variables in expand_stack_vars.  */
1158  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1159
1160  offset = alloc_stack_frame_space (size, byte_align);
1161
1162  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1163			   crtl->max_used_stack_slot_alignment, offset);
1164}
1165
1166/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1167   that will reside in a hard register.  */
1168
1169static void
1170expand_one_hard_reg_var (tree var)
1171{
1172  rest_of_decl_compilation (var, 0, 0);
1173}
1174
1175/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1176   that will reside in a pseudo register.  */
1177
1178static void
1179expand_one_register_var (tree var)
1180{
1181  tree decl = SSAVAR (var);
1182  tree type = TREE_TYPE (decl);
1183  machine_mode reg_mode = promote_decl_mode (decl, NULL);
1184  rtx x = gen_reg_rtx (reg_mode);
1185
1186  set_rtl (var, x);
1187
1188  /* Note if the object is a user variable.  */
1189  if (!DECL_ARTIFICIAL (decl))
1190    mark_user_reg (x);
1191
1192  if (POINTER_TYPE_P (type))
1193    mark_reg_pointer (x, get_pointer_alignment (var));
1194}
1195
1196/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1197   has some associated error, e.g. its type is error-mark.  We just need
1198   to pick something that won't crash the rest of the compiler.  */
1199
1200static void
1201expand_one_error_var (tree var)
1202{
1203  machine_mode mode = DECL_MODE (var);
1204  rtx x;
1205
1206  if (mode == BLKmode)
1207    x = gen_rtx_MEM (BLKmode, const0_rtx);
1208  else if (mode == VOIDmode)
1209    x = const0_rtx;
1210  else
1211    x = gen_reg_rtx (mode);
1212
1213  SET_DECL_RTL (var, x);
1214}
1215
1216/* A subroutine of expand_one_var.  VAR is a variable that will be
1217   allocated to the local stack frame.  Return true if we wish to
1218   add VAR to STACK_VARS so that it will be coalesced with other
1219   variables.  Return false to allocate VAR immediately.
1220
1221   This function is used to reduce the number of variables considered
1222   for coalescing, which reduces the size of the quadratic problem.  */
1223
1224static bool
1225defer_stack_allocation (tree var, bool toplevel)
1226{
1227  /* Whether the variable is small enough for immediate allocation not to be
1228     a problem with regard to the frame size.  */
1229  bool smallish
1230    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1231       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1232
1233  /* If stack protection is enabled, *all* stack variables must be deferred,
1234     so that we can re-order the strings to the top of the frame.
1235     Similarly for Address Sanitizer.  */
1236  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1237    return true;
1238
1239  /* We handle "large" alignment via dynamic allocation.  We want to handle
1240     this extra complication in only one place, so defer them.  */
1241  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1242    return true;
1243
1244  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1245     might be detached from their block and appear at toplevel when we reach
1246     here.  We want to coalesce them with variables from other blocks when
1247     the immediate contribution to the frame size would be noticeable.  */
1248  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1249    return true;
1250
1251  /* Variables declared in the outermost scope automatically conflict
1252     with every other variable.  The only reason to want to defer them
1253     at all is that, after sorting, we can more efficiently pack
1254     small variables in the stack frame.  Continue to defer at -O2.  */
1255  if (toplevel && optimize < 2)
1256    return false;
1257
1258  /* Without optimization, *most* variables are allocated from the
1259     stack, which makes the quadratic problem large exactly when we
1260     want compilation to proceed as quickly as possible.  On the
1261     other hand, we don't want the function's stack frame size to
1262     get completely out of hand.  So we avoid adding scalars and
1263     "small" aggregates to the list at all.  */
1264  if (optimize == 0 && smallish)
1265    return false;
1266
1267  return true;
1268}
1269
1270/* A subroutine of expand_used_vars.  Expand one variable according to
1271   its flavor.  Variables to be placed on the stack are not actually
1272   expanded yet, merely recorded.
1273   When REALLY_EXPAND is false, only add stack values to be allocated.
1274   Return the stack usage this variable is supposed to take.
1275*/
1276
1277static HOST_WIDE_INT
1278expand_one_var (tree var, bool toplevel, bool really_expand)
1279{
1280  unsigned int align = BITS_PER_UNIT;
1281  tree origvar = var;
1282
1283  var = SSAVAR (var);
1284
1285  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1286    {
1287      /* Because we don't know if VAR will be in a register or on the stack,
1288	 we conservatively assume it will be on the stack even if VAR is
1289	 eventually put into a register after the RA pass.  For non-automatic
1290	 variables, which won't be on stack, we collect alignment of
1291	 type and ignore user specified alignment.  Similarly for
1292	 SSA_NAMEs for which use_register_for_decl returns true.  */
1293      if (TREE_STATIC (var)
1294	  || DECL_EXTERNAL (var)
1295	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1296	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1297				   TYPE_MODE (TREE_TYPE (var)),
1298				   TYPE_ALIGN (TREE_TYPE (var)));
1299      else if (DECL_HAS_VALUE_EXPR_P (var)
1300	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1301	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1302	   or variables which were assigned a stack slot already by
1303	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1304	   changed according to the offset chosen for it.  */
1305	align = crtl->stack_alignment_estimated;
1306      else
1307	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1308
1309      /* If the variable alignment is very large we'll dynamically allocate
1310	 it, which means that the in-frame portion is just a pointer.  */
1311      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1312	align = POINTER_SIZE;
1313    }
1314
1315  if (SUPPORTS_STACK_ALIGNMENT
1316      && crtl->stack_alignment_estimated < align)
1317    {
1318      /* stack_alignment_estimated shouldn't change after the stack
1319         realign decision has been made.  */
1320      gcc_assert (!crtl->stack_realign_processed);
1321      crtl->stack_alignment_estimated = align;
1322    }
1323
1324  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1325     So here we only make sure stack_alignment_needed >= align.  */
1326  if (crtl->stack_alignment_needed < align)
1327    crtl->stack_alignment_needed = align;
1328  if (crtl->max_used_stack_slot_alignment < align)
1329    crtl->max_used_stack_slot_alignment = align;
1330
1331  if (TREE_CODE (origvar) == SSA_NAME)
1332    {
1333      gcc_assert (TREE_CODE (var) != VAR_DECL
1334		  || (!DECL_EXTERNAL (var)
1335		      && !DECL_HAS_VALUE_EXPR_P (var)
1336		      && !TREE_STATIC (var)
1337		      && TREE_TYPE (var) != error_mark_node
1338		      && !DECL_HARD_REGISTER (var)
1339		      && really_expand));
1340    }
1341  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1342    ;
1343  else if (DECL_EXTERNAL (var))
1344    ;
1345  else if (DECL_HAS_VALUE_EXPR_P (var))
1346    ;
1347  else if (TREE_STATIC (var))
1348    ;
1349  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1350    ;
1351  else if (TREE_TYPE (var) == error_mark_node)
1352    {
1353      if (really_expand)
1354        expand_one_error_var (var);
1355    }
1356  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1357    {
1358      if (really_expand)
1359	{
1360	  expand_one_hard_reg_var (var);
1361	  if (!DECL_HARD_REGISTER (var))
1362	    /* Invalid register specification.  */
1363	    expand_one_error_var (var);
1364	}
1365    }
1366  else if (use_register_for_decl (var))
1367    {
1368      if (really_expand)
1369        expand_one_register_var (origvar);
1370    }
1371  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1372    {
1373      /* Reject variables which cover more than half of the address-space.  */
1374      if (really_expand)
1375	{
1376	  error ("size of variable %q+D is too large", var);
1377	  expand_one_error_var (var);
1378	}
1379    }
1380  else if (defer_stack_allocation (var, toplevel))
1381    add_stack_var (origvar);
1382  else
1383    {
1384      if (really_expand)
1385        expand_one_stack_var (origvar);
1386      return tree_to_uhwi (DECL_SIZE_UNIT (var));
1387    }
1388  return 0;
1389}
1390
1391/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1392   expanding variables.  Those variables that can be put into registers
1393   are allocated pseudos; those that can't are put on the stack.
1394
1395   TOPLEVEL is true if this is the outermost BLOCK.  */
1396
1397static void
1398expand_used_vars_for_block (tree block, bool toplevel)
1399{
1400  tree t;
1401
1402  /* Expand all variables at this level.  */
1403  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1404    if (TREE_USED (t)
1405        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1406	    || !DECL_NONSHAREABLE (t)))
1407      expand_one_var (t, toplevel, true);
1408
1409  /* Expand all variables at contained (sub-block) levels.  */
1410  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1411    expand_used_vars_for_block (t, false);
1412}
1413
1414/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1415   and clear TREE_USED on all local variables.  */
1416
1417static void
1418clear_tree_used (tree block)
1419{
1420  tree t;
1421
1422  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1423    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1424    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1425	|| !DECL_NONSHAREABLE (t))
1426      TREE_USED (t) = 0;
1427
1428  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1429    clear_tree_used (t);
1430}
1431
1432enum {
1433  SPCT_FLAG_DEFAULT = 1,
1434  SPCT_FLAG_ALL = 2,
1435  SPCT_FLAG_STRONG = 3,
1436  SPCT_FLAG_EXPLICIT = 4
1437};
1438
1439/* Examine TYPE and determine a bit mask of the following features.  */
1440
1441#define SPCT_HAS_LARGE_CHAR_ARRAY	1
1442#define SPCT_HAS_SMALL_CHAR_ARRAY	2
1443#define SPCT_HAS_ARRAY			4
1444#define SPCT_HAS_AGGREGATE		8
1445
1446static unsigned int
1447stack_protect_classify_type (tree type)
1448{
1449  unsigned int ret = 0;
1450  tree t;
1451
1452  switch (TREE_CODE (type))
1453    {
1454    case ARRAY_TYPE:
1455      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1456      if (t == char_type_node
1457	  || t == signed_char_type_node
1458	  || t == unsigned_char_type_node)
1459	{
1460	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1461	  unsigned HOST_WIDE_INT len;
1462
1463	  if (!TYPE_SIZE_UNIT (type)
1464	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1465	    len = max;
1466	  else
1467	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1468
1469	  if (len < max)
1470	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1471	  else
1472	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1473	}
1474      else
1475	ret = SPCT_HAS_ARRAY;
1476      break;
1477
1478    case UNION_TYPE:
1479    case QUAL_UNION_TYPE:
1480    case RECORD_TYPE:
1481      ret = SPCT_HAS_AGGREGATE;
1482      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1483	if (TREE_CODE (t) == FIELD_DECL)
1484	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1485      break;
1486
1487    default:
1488      break;
1489    }
1490
1491  return ret;
1492}
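
/* For example, with the default --param ssp-buffer-size=8, a declaration
   like "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "int v[10]" as just SPCT_HAS_ARRAY, and a struct containing a field
   "char name[16]" as SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
   | SPCT_HAS_ARRAY.  */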
1493
1494/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1495   part of the local stack frame.  Remember if we ever return nonzero for
1496   any variable in this function.  The return value is the phase number in
1497   which the variable should be allocated.  */
1498
1499static int
1500stack_protect_decl_phase (tree decl)
1501{
1502  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1503  int ret = 0;
1504
1505  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1506    has_short_buffer = true;
1507
1508  if (flag_stack_protect == SPCT_FLAG_ALL
1509      || flag_stack_protect == SPCT_FLAG_STRONG
1510      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1511	  && lookup_attribute ("stack_protect",
1512			       DECL_ATTRIBUTES (current_function_decl))))
1513    {
1514      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1515	  && !(bits & SPCT_HAS_AGGREGATE))
1516	ret = 1;
1517      else if (bits & SPCT_HAS_ARRAY)
1518	ret = 2;
1519    }
1520  else
1521    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1522
1523  if (ret)
1524    has_protected_decls = true;
1525
1526  return ret;
1527}
1528
1529/* Two helper routines that check for phase 1 and phase 2.  These are used
1530   as callbacks for expand_stack_vars.  */
1531
1532static bool
1533stack_protect_decl_phase_1 (size_t i)
1534{
1535  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1536}
1537
1538static bool
1539stack_protect_decl_phase_2 (size_t i)
1540{
1541  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1542}
1543
1544/* A helper function that checks for the asan phase (with the stack protector
1545   it is phase 3).  This is used as a callback for expand_stack_vars.
1546   Returns true if any of the vars in the partition need to be protected.  */
1547
1548static bool
1549asan_decl_phase_3 (size_t i)
1550{
1551  while (i != EOC)
1552    {
1553      if (asan_protect_stack_decl (stack_vars[i].decl))
1554	return true;
1555      i = stack_vars[i].next;
1556    }
1557  return false;
1558}
1559
1560/* Ensure that variables in different stack protection phases conflict
1561   so that they are not merged and share the same stack slot.  */
1562
1563static void
1564add_stack_protection_conflicts (void)
1565{
1566  size_t i, j, n = stack_vars_num;
1567  unsigned char *phase;
1568
1569  phase = XNEWVEC (unsigned char, n);
1570  for (i = 0; i < n; ++i)
1571    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1572
1573  for (i = 0; i < n; ++i)
1574    {
1575      unsigned char ph_i = phase[i];
1576      for (j = i + 1; j < n; ++j)
1577	if (ph_i != phase[j])
1578	  add_stack_var_conflict (i, j);
1579    }
1580
1581  XDELETEVEC (phase);
1582}
1583
1584/* Create a decl for the guard at the top of the stack frame.  */
1585
1586static void
1587create_stack_guard (void)
1588{
1589  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1590			   VAR_DECL, NULL, ptr_type_node);
1591  TREE_THIS_VOLATILE (guard) = 1;
1592  TREE_USED (guard) = 1;
1593  expand_one_stack_var (guard);
1594  crtl->stack_protect_guard = guard;
1595}
1596
1597/* Prepare for expanding variables.  */
1598static void
1599init_vars_expansion (void)
1600{
1601  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1602  bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1603
1604  /* A map from decl to stack partition.  */
1605  decl_to_stack_part = new hash_map<tree, size_t>;
1606
1607  /* Initialize local stack smashing state.  */
1608  has_protected_decls = false;
1609  has_short_buffer = false;
1610}
1611
1612/* Free up stack variable graph data.  */
1613static void
1614fini_vars_expansion (void)
1615{
1616  bitmap_obstack_release (&stack_var_bitmap_obstack);
1617  if (stack_vars)
1618    XDELETEVEC (stack_vars);
1619  if (stack_vars_sorted)
1620    XDELETEVEC (stack_vars_sorted);
1621  stack_vars = NULL;
1622  stack_vars_sorted = NULL;
1623  stack_vars_alloc = stack_vars_num = 0;
1624  delete decl_to_stack_part;
1625  decl_to_stack_part = NULL;
1626}
1627
1628/* Make a fair guess for the size of the stack frame of the function
1629   in NODE.  This doesn't have to be exact, the result is only used in
1630   the inline heuristics.  So we don't want to run the full stack var
1631   packing algorithm (which is quadratic in the number of stack vars).
1632   Instead, we calculate the total size of all stack vars.  This turns
1633   out to be a pretty fair estimate -- packing of stack vars doesn't
1634   happen very often.  */
1635
1636HOST_WIDE_INT
1637estimated_stack_frame_size (struct cgraph_node *node)
1638{
1639  HOST_WIDE_INT size = 0;
1640  size_t i;
1641  tree var;
1642  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1643
1644  push_cfun (fn);
1645
1646  init_vars_expansion ();
1647
1648  FOR_EACH_LOCAL_DECL (fn, i, var)
1649    if (auto_var_in_fn_p (var, fn->decl))
1650      size += expand_one_var (var, true, false);
1651
1652  if (stack_vars_num > 0)
1653    {
1654      /* Fake sorting the stack vars for account_stack_vars ().  */
1655      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1656      for (i = 0; i < stack_vars_num; ++i)
1657	stack_vars_sorted[i] = i;
1658      size += account_stack_vars ();
1659    }
1660
1661  fini_vars_expansion ();
1662  pop_cfun ();
1663  return size;
1664}
1665
1666/* Helper routine to check if a record or union contains an array field. */
1667
1668static int
1669record_or_union_type_has_array_p (const_tree tree_type)
1670{
1671  tree fields = TYPE_FIELDS (tree_type);
1672  tree f;
1673
1674  for (f = fields; f; f = DECL_CHAIN (f))
1675    if (TREE_CODE (f) == FIELD_DECL)
1676      {
1677	tree field_type = TREE_TYPE (f);
1678	if (RECORD_OR_UNION_TYPE_P (field_type)
1679	    && record_or_union_type_has_array_p (field_type))
1680	  return 1;
1681	if (TREE_CODE (field_type) == ARRAY_TYPE)
1682	  return 1;
1683      }
1684  return 0;
1685}
1686
1687/* Check if the current function has local referenced variables that
1688   have their addresses taken, contain an array, or are arrays.  */
1689
1690static bool
1691stack_protect_decl_p ()
1692{
1693  unsigned i;
1694  tree var;
1695
1696  FOR_EACH_LOCAL_DECL (cfun, i, var)
1697    if (!is_global_var (var))
1698      {
1699	tree var_type = TREE_TYPE (var);
1700	if (TREE_CODE (var) == VAR_DECL
1701	    && (TREE_CODE (var_type) == ARRAY_TYPE
1702		|| TREE_ADDRESSABLE (var)
1703		|| (RECORD_OR_UNION_TYPE_P (var_type)
1704		    && record_or_union_type_has_array_p (var_type))))
1705	  return true;
1706      }
1707  return false;
1708}
1709
1710/* Check if the current function has calls that use a return slot.  */
1711
1712static bool
1713stack_protect_return_slot_p ()
1714{
1715  basic_block bb;
1716
1717  FOR_ALL_BB_FN (bb, cfun)
1718    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1719	 !gsi_end_p (gsi); gsi_next (&gsi))
1720      {
1721	gimple stmt = gsi_stmt (gsi);
1722	/* This assumes that calls to internal-only functions never
1723	   use a return slot.  */
1724	if (is_gimple_call (stmt)
1725	    && !gimple_call_internal_p (stmt)
1726	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1727				  gimple_call_fndecl (stmt)))
1728	  return true;
1729      }
1730  return false;
1731}
1732
1733/* Expand all variables used in the function.  */
1734
1735static rtx_insn *
1736expand_used_vars (void)
1737{
1738  tree var, outer_block = DECL_INITIAL (current_function_decl);
1739  vec<tree> maybe_local_decls = vNULL;
1740  rtx_insn *var_end_seq = NULL;
1741  unsigned i;
1742  unsigned len;
1743  bool gen_stack_protect_signal = false;
1744
1745  /* Compute the phase of the stack frame for this function.  */
1746  {
1747    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1748    int off = STARTING_FRAME_OFFSET % align;
1749    frame_phase = off ? align - off : 0;
1750  }
1751
1752  /* Set TREE_USED on all variables in the local_decls.  */
1753  FOR_EACH_LOCAL_DECL (cfun, i, var)
1754    TREE_USED (var) = 1;
1755  /* Clear TREE_USED on all variables associated with a block scope.  */
1756  clear_tree_used (DECL_INITIAL (current_function_decl));
1757
1758  init_vars_expansion ();
1759
1760  if (targetm.use_pseudo_pic_reg ())
1761    pic_offset_table_rtx = gen_reg_rtx (Pmode);
1762
1763  hash_map<tree, tree> ssa_name_decls;
1764  for (i = 0; i < SA.map->num_partitions; i++)
1765    {
1766      tree var = partition_to_var (SA.map, i);
1767
1768      gcc_assert (!virtual_operand_p (var));
1769
1770      /* Assign decls to each SSA name partition, share decls for partitions
1771         we could have coalesced (those with the same type).  */
1772      if (SSA_NAME_VAR (var) == NULL_TREE)
1773	{
1774	  tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1775	  if (!*slot)
1776	    *slot = create_tmp_reg (TREE_TYPE (var));
1777	  replace_ssa_name_symbol (var, *slot);
1778	}
1779
1780      /* Always allocate space for partitions based on VAR_DECLs.  But for
1781	 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1782	 debug info, there is no need to do so if optimization is disabled
1783	 because all the SSA_NAMEs based on these DECLs have been coalesced
1784	 into a single partition, which is thus assigned the canonical RTL
	 location of the DECLs.  If in_lto_p, we can't rely on optimize:
	 a function could be compiled with -O1 -flto first and only the
	 link performed at -O0.  */
1788      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1789	expand_one_var (var, true, true);
1790      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1791	{
1792	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1793	     contain the default def (representing the parm or result itself)
1794	     we don't do anything here.  But those which don't contain the
1795	     default def (representing a temporary based on the parm/result)
1796	     we need to allocate space just like for normal VAR_DECLs.  */
1797	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
1798	    {
1799	      expand_one_var (var, true, true);
1800	      gcc_assert (SA.partition_to_pseudo[i]);
1801	    }
1802	}
1803    }
1804
  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();
1808
  /* At this point all variables on the local_decls list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
1811
1812  len = vec_safe_length (cfun->local_decls);
1813  FOR_EACH_LOCAL_DECL (cfun, i, var)
1814    {
1815      bool expand_now = false;
1816
1817      /* Expanded above already.  */
1818      if (is_gimple_reg (var))
1819	{
1820	  TREE_USED (var) = 0;
1821	  goto next;
1822	}
1823      /* We didn't set a block for static or extern because it's hard
1824	 to tell the difference between a global variable (re)declared
1825	 in a local scope, and one that's really declared there to
1826	 begin with.  And it doesn't really matter much, since we're
1827	 not giving them stack space.  Expand them now.  */
1828      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1829	expand_now = true;
1830
1831      /* Expand variables not associated with any block now.  Those created by
1832	 the optimizers could be live anywhere in the function.  Those that
1833	 could possibly have been scoped originally and detached from their
1834	 block will have their allocation deferred so we coalesce them with
1835	 others when optimization is enabled.  */
1836      else if (TREE_USED (var))
1837	expand_now = true;
1838
1839      /* Finally, mark all variables on the list as used.  We'll use
1840	 this in a moment when we expand those associated with scopes.  */
1841      TREE_USED (var) = 1;
1842
1843      if (expand_now)
1844	expand_one_var (var, true, true);
1845
1846    next:
1847      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1848	{
1849	  rtx rtl = DECL_RTL_IF_SET (var);
1850
1851	  /* Keep artificial non-ignored vars in cfun->local_decls
1852	     chain until instantiate_decls.  */
1853	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1854	    add_local_decl (cfun, var);
1855	  else if (rtl == NULL_RTX)
1856	    /* If rtl isn't set yet, which can happen e.g. with
1857	       -fstack-protector, retry before returning from this
1858	       function.  */
1859	    maybe_local_decls.safe_push (var);
1860	}
1861    }
1862
1863  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1864
1865     +-----------------+-----------------+
1866     | ...processed... | ...duplicates...|
1867     +-----------------+-----------------+
1868                       ^
1869		       +-- LEN points here.
1870
1871     We just want the duplicates, as those are the artificial
1872     non-ignored vars that we want to keep until instantiate_decls.
1873     Move them down and truncate the array.  */
1874  if (!vec_safe_is_empty (cfun->local_decls))
1875    cfun->local_decls->block_remove (0, len);
1876
1877  /* At this point, all variables within the block tree with TREE_USED
1878     set are actually used by the optimized function.  Lay them out.  */
1879  expand_used_vars_for_block (outer_block, true);
1880
1881  if (stack_vars_num > 0)
1882    {
1883      add_scope_conflicts ();
1884
1885      /* If stack protection is enabled, we don't share space between
1886	 vulnerable data and non-vulnerable data.  */
1887      if (flag_stack_protect != 0
1888	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1889	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1890		  && lookup_attribute ("stack_protect",
1891				       DECL_ATTRIBUTES (current_function_decl)))))
1892	add_stack_protection_conflicts ();
1893
1894      /* Now that we have collected all stack variables, and have computed a
1895	 minimal interference graph, attempt to save some stack space.  */
1896      partition_stack_vars ();
1897      if (dump_file)
1898	dump_stack_var_partition ();
1899    }
1900
1901  switch (flag_stack_protect)
1902    {
1903    case SPCT_FLAG_ALL:
1904      create_stack_guard ();
1905      break;
1906
1907    case SPCT_FLAG_STRONG:
1908      if (gen_stack_protect_signal
1909	  || cfun->calls_alloca || has_protected_decls
1910	  || lookup_attribute ("stack_protect",
1911			       DECL_ATTRIBUTES (current_function_decl)))
1912	create_stack_guard ();
1913      break;
1914
1915    case SPCT_FLAG_DEFAULT:
1916      if (cfun->calls_alloca || has_protected_decls
1917	  || lookup_attribute ("stack_protect",
1918			       DECL_ATTRIBUTES (current_function_decl)))
1919	create_stack_guard ();
1920      break;
1921
1922    case SPCT_FLAG_EXPLICIT:
1923      if (lookup_attribute ("stack_protect",
1924			    DECL_ATTRIBUTES (current_function_decl)))
1925	create_stack_guard ();
1926      break;
1927    default:
1928      ;
1929    }
1930
1931  /* Assign rtl to each variable based on these partitions.  */
1932  if (stack_vars_num > 0)
1933    {
1934      struct stack_vars_data data;
1935
1936      data.asan_vec = vNULL;
1937      data.asan_decl_vec = vNULL;
1938      data.asan_base = NULL_RTX;
1939      data.asan_alignb = 0;
1940
1941      /* Reorder decls to be protected by iterating over the variables
1942	 array multiple times, and allocating out of each phase in turn.  */
1943      /* ??? We could probably integrate this into the qsort we did
1944	 earlier, such that we naturally see these variables first,
1945	 and thus naturally allocate things in the right order.  */
1946      if (has_protected_decls)
1947	{
1948	  /* Phase 1 contains only character arrays.  */
1949	  expand_stack_vars (stack_protect_decl_phase_1, &data);
1950
1951	  /* Phase 2 contains other kinds of arrays.  */
1952	  if (flag_stack_protect == SPCT_FLAG_ALL
1953	      || flag_stack_protect == SPCT_FLAG_STRONG
1954	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1955		  && lookup_attribute ("stack_protect",
1956				       DECL_ATTRIBUTES (current_function_decl))))
1957	    expand_stack_vars (stack_protect_decl_phase_2, &data);
1958	}
1959
1960      if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1961	/* Phase 3, any partitions that need asan protection
1962	   in addition to phase 1 and 2.  */
1963	expand_stack_vars (asan_decl_phase_3, &data);
1964
1965      if (!data.asan_vec.is_empty ())
1966	{
1967	  HOST_WIDE_INT prev_offset = frame_offset;
1968	  HOST_WIDE_INT offset, sz, redzonesz;
1969	  redzonesz = ASAN_RED_ZONE_SIZE;
1970	  sz = data.asan_vec[0] - prev_offset;
1971	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1972	      && data.asan_alignb <= 4096
1973	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1974	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1975			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
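	  /* Worked example (hypothetical values, assuming ASAN_RED_ZONE_SIZE
	     is 32): with sz == 40 and data.asan_alignb == 64, redzonesz
	     becomes ((40 + 32 + 63) & ~63) - 40 == 88, so sz + redzonesz
	     is a multiple of the 64-byte alignment.  */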
1976	  offset
1977	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1978	  data.asan_vec.safe_push (prev_offset);
1979	  data.asan_vec.safe_push (offset);
1980	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
1981	  if (STRICT_ALIGNMENT)
1982	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1983				      << ASAN_SHADOW_SHIFT)
1984				     / BITS_PER_UNIT, 1);
1985
1986	  var_end_seq
1987	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
1988					  data.asan_base,
1989					  data.asan_alignb,
1990					  data.asan_vec.address (),
1991					  data.asan_decl_vec.address (),
1992					  data.asan_vec.length ());
1993	}
1994
1995      expand_stack_vars (NULL, &data);
1996
1997      data.asan_vec.release ();
1998      data.asan_decl_vec.release ();
1999    }
2000
2001  fini_vars_expansion ();
2002
  /* If there were any artificial non-ignored vars without rtl
     found earlier, check whether deferred stack allocation has since
     assigned rtl to them.  */
2006  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2007    {
2008      rtx rtl = DECL_RTL_IF_SET (var);
2009
2010      /* Keep artificial non-ignored vars in cfun->local_decls
2011	 chain until instantiate_decls.  */
2012      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2013	add_local_decl (cfun, var);
2014    }
2015  maybe_local_decls.release ();
2016
2017  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2018  if (STACK_ALIGNMENT_NEEDED)
2019    {
2020      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
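      /* E.g. (hypothetical values): with ALIGN == 16 and a downward-growing
	 frame, a frame_offset of -40 becomes -40 & -16 == -48, i.e. it is
	 rounded away from zero so the frame size stays a multiple of the
	 preferred alignment.  */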
2021      if (!FRAME_GROWS_DOWNWARD)
2022	frame_offset += align - 1;
2023      frame_offset &= -align;
2024    }
2025
2026  return var_end_seq;
2027}
2028
2029
2030/* If we need to produce a detailed dump, print the tree representation
2031   for STMT to the dump file.  SINCE is the last RTX after which the RTL
2032   generated for STMT should have been appended.  */
2033
2034static void
2035maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2036{
2037  if (dump_file && (dump_flags & TDF_DETAILS))
2038    {
2039      fprintf (dump_file, "\n;; ");
2040      print_gimple_stmt (dump_file, stmt, 0,
2041			 TDF_SLIM | (dump_flags & TDF_LINENO));
2042      fprintf (dump_file, "\n");
2043
2044      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2045    }
2046}
2047
2048/* Maps the blocks that do not contain tree labels to rtx labels.  */
2049
2050static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2051
2052/* Returns the label_rtx expression for a label starting basic block BB.  */
2053
2054static rtx
label_rtx_for_bb (basic_block bb)
2056{
2057  gimple_stmt_iterator gsi;
2058  tree lab;
2059
2060  if (bb->flags & BB_RTL)
2061    return block_label (bb);
2062
2063  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2064  if (elt)
2065    return *elt;
2066
2067  /* Find the tree label if it is present.  */
2068
2069  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2070    {
2071      glabel *lab_stmt;
2072
2073      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2074      if (!lab_stmt)
2075	break;
2076
2077      lab = gimple_label_label (lab_stmt);
2078      if (DECL_NONLOCAL (lab))
2079	break;
2080
2081      return label_rtx (lab);
2082    }
2083
2084  rtx_code_label *l = gen_label_rtx ();
2085  lab_rtx_for_bb->put (bb, l);
2086  return l;
2087}
2088
2089
2090/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2091   of a basic block where we just expanded the conditional at the end,
2092   possibly clean up the CFG and instruction sequence.  LAST is the
2093   last instruction before the just emitted jump sequence.  */
2094
2095static void
2096maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2097{
2098  /* Special case: when jumpif decides that the condition is
2099     trivial it emits an unconditional jump (and the necessary
2100     barrier).  But we still have two edges, the fallthru one is
2101     wrong.  purge_dead_edges would clean this up later.  Unfortunately
2102     we have to insert insns (and split edges) before
2103     find_many_sub_basic_blocks and hence before purge_dead_edges.
2104     But splitting edges might create new blocks which depend on the
2105     fact that if there are two edges there's no barrier.  So the
2106     barrier would get lost and verify_flow_info would ICE.  Instead
2107     of auditing all edge splitters to care for the barrier (which
2108     normally isn't there in a cleaned CFG), fix it here.  */
2109  if (BARRIER_P (get_last_insn ()))
2110    {
2111      rtx_insn *insn;
2112      remove_edge (e);
      /* Now we have a single successor block.  If we have insns to
	 insert on the remaining edge, we will potentially insert them
	 at the end of this block (if the destination block isn't
	 feasible) in order to avoid splitting the edge.  This insertion
	 will take place in front of the last jump.  But we might have
	 emitted multiple jumps (conditional and one unconditional) to
	 the same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
2122      insn = PREV_INSN (get_last_insn ());
2123      /* Make sure we have an unconditional jump.  Otherwise we're
2124	 confused.  */
2125      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2126      for (insn = PREV_INSN (insn); insn != last;)
2127	{
2128	  insn = PREV_INSN (insn);
2129	  if (JUMP_P (NEXT_INSN (insn)))
2130	    {
2131	      if (!any_condjump_p (NEXT_INSN (insn)))
2132		{
2133		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2134		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2135		}
2136	      delete_insn (NEXT_INSN (insn));
2137	    }
2138	}
2139    }
2140}
2141
2142/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2143   Returns a new basic block if we've terminated the current basic
2144   block and created a new one.  */
2145
2146static basic_block
2147expand_gimple_cond (basic_block bb, gcond *stmt)
2148{
2149  basic_block new_bb, dest;
2150  edge new_edge;
2151  edge true_edge;
2152  edge false_edge;
2153  rtx_insn *last2, *last;
2154  enum tree_code code;
2155  tree op0, op1;
2156
2157  code = gimple_cond_code (stmt);
2158  op0 = gimple_cond_lhs (stmt);
2159  op1 = gimple_cond_rhs (stmt);
2160  /* We're sometimes presented with such code:
2161       D.123_1 = x < y;
2162       if (D.123_1 != 0)
2163         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so compensate for this
     here, as a special exception, if TER would have made the same change.  */
2168  if (SA.values
2169      && TREE_CODE (op0) == SSA_NAME
2170      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2171      && TREE_CODE (op1) == INTEGER_CST
2172      && ((gimple_cond_code (stmt) == NE_EXPR
2173	   && integer_zerop (op1))
2174	  || (gimple_cond_code (stmt) == EQ_EXPR
2175	      && integer_onep (op1)))
2176      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2177    {
2178      gimple second = SSA_NAME_DEF_STMT (op0);
2179      if (gimple_code (second) == GIMPLE_ASSIGN)
2180	{
2181	  enum tree_code code2 = gimple_assign_rhs_code (second);
2182	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2183	    {
2184	      code = code2;
2185	      op0 = gimple_assign_rhs1 (second);
2186	      op1 = gimple_assign_rhs2 (second);
2187	    }
2188	  /* If jumps are cheap and the target does not support conditional
2189	     compare, turn some more codes into jumpy sequences.  */
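	  /* Illustration (hypothetical GIMPLE): with cheap branches and no
	     conditional-compare support, a TERed condition such as

	       _1 = a < b;
	       _2 = c < d;
	       _3 = _1 & _2;
	       if (_3 != 0) ...

	     is re-expanded as TRUTH_ANDIF_EXPR, i.e. as two conditional
	     jumps, instead of materializing both comparison results.  */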
2190	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2191		   && targetm.gen_ccmp_first == NULL)
2192	    {
2193	      if ((code2 == BIT_AND_EXPR
2194		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2195		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2196		  || code2 == TRUTH_AND_EXPR)
2197		{
2198		  code = TRUTH_ANDIF_EXPR;
2199		  op0 = gimple_assign_rhs1 (second);
2200		  op1 = gimple_assign_rhs2 (second);
2201		}
2202	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2203		{
2204		  code = TRUTH_ORIF_EXPR;
2205		  op0 = gimple_assign_rhs1 (second);
2206		  op1 = gimple_assign_rhs2 (second);
2207		}
2208	    }
2209	}
2210    }
2211
2212  last2 = last = get_last_insn ();
2213
2214  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2215  set_curr_insn_location (gimple_location (stmt));
2216
2217  /* These flags have no purpose in RTL land.  */
2218  true_edge->flags &= ~EDGE_TRUE_VALUE;
2219  false_edge->flags &= ~EDGE_FALSE_VALUE;
2220
  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
2223  if (false_edge->dest == bb->next_bb)
2224    {
2225      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2226		true_edge->probability);
2227      maybe_dump_rtl_for_gimple_stmt (stmt, last);
2228      if (true_edge->goto_locus != UNKNOWN_LOCATION)
2229	set_curr_insn_location (true_edge->goto_locus);
2230      false_edge->flags |= EDGE_FALLTHRU;
2231      maybe_cleanup_end_of_block (false_edge, last);
2232      return NULL;
2233    }
2234  if (true_edge->dest == bb->next_bb)
2235    {
2236      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2237		   false_edge->probability);
2238      maybe_dump_rtl_for_gimple_stmt (stmt, last);
2239      if (false_edge->goto_locus != UNKNOWN_LOCATION)
2240	set_curr_insn_location (false_edge->goto_locus);
2241      true_edge->flags |= EDGE_FALLTHRU;
2242      maybe_cleanup_end_of_block (true_edge, last);
2243      return NULL;
2244    }
2245
2246  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2247	    true_edge->probability);
2248  last = get_last_insn ();
2249  if (false_edge->goto_locus != UNKNOWN_LOCATION)
2250    set_curr_insn_location (false_edge->goto_locus);
2251  emit_jump (label_rtx_for_bb (false_edge->dest));
2252
2253  BB_END (bb) = last;
2254  if (BARRIER_P (BB_END (bb)))
2255    BB_END (bb) = PREV_INSN (BB_END (bb));
2256  update_bb_for_insn (bb);
2257
2258  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2259  dest = false_edge->dest;
2260  redirect_edge_succ (false_edge, new_bb);
2261  false_edge->flags |= EDGE_FALLTHRU;
2262  new_bb->count = false_edge->count;
2263  new_bb->frequency = EDGE_FREQUENCY (false_edge);
2264  add_bb_to_loop (new_bb, bb->loop_father);
2265  new_edge = make_edge (new_bb, dest, 0);
2266  new_edge->probability = REG_BR_PROB_BASE;
2267  new_edge->count = new_bb->count;
2268  if (BARRIER_P (BB_END (new_bb)))
2269    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2270  update_bb_for_insn (new_bb);
2271
2272  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2273
2274  if (true_edge->goto_locus != UNKNOWN_LOCATION)
2275    {
2276      set_curr_insn_location (true_edge->goto_locus);
2277      true_edge->goto_locus = curr_insn_location ();
2278    }
2279
2280  return new_bb;
2281}
2282
2283/* Mark all calls that can have a transaction restart.  */
2284
2285static void
2286mark_transaction_restart_calls (gimple stmt)
2287{
2288  struct tm_restart_node dummy;
2289  tm_restart_node **slot;
2290
2291  if (!cfun->gimple_df->tm_restart)
2292    return;
2293
2294  dummy.stmt = stmt;
2295  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2296  if (slot)
2297    {
2298      struct tm_restart_node *n = *slot;
2299      tree list = n->label_or_list;
2300      rtx_insn *insn;
2301
2302      for (insn = next_real_insn (get_last_insn ());
2303	   !CALL_P (insn);
2304	   insn = next_real_insn (insn))
2305	continue;
2306
2307      if (TREE_CODE (list) == LABEL_DECL)
2308	add_reg_note (insn, REG_TM, label_rtx (list));
2309      else
2310	for (; list ; list = TREE_CHAIN (list))
2311	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2312    }
2313}
2314
2315/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2316   statement STMT.  */
2317
2318static void
2319expand_call_stmt (gcall *stmt)
2320{
2321  tree exp, decl, lhs;
2322  bool builtin_p;
2323  size_t i;
2324
2325  if (gimple_call_internal_p (stmt))
2326    {
2327      expand_internal_call (stmt);
2328      return;
2329    }
2330
2331  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2332
2333  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2334  decl = gimple_call_fndecl (stmt);
2335  builtin_p = decl && DECL_BUILT_IN (decl);
2336
2337  /* If this is not a builtin function, the function type through which the
2338     call is made may be different from the type of the function.  */
2339  if (!builtin_p)
2340    CALL_EXPR_FN (exp)
2341      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2342		      CALL_EXPR_FN (exp));
2343
2344  TREE_TYPE (exp) = gimple_call_return_type (stmt);
2345  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2346
2347  for (i = 0; i < gimple_call_num_args (stmt); i++)
2348    {
2349      tree arg = gimple_call_arg (stmt, i);
2350      gimple def;
      /* For arguments of builtin functions, substitute the defining
	 ADDR_EXPR back in (when TER applies) so we have a chance to
	 infer more precise alignment information.  See PR39954.  */
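      /* Hypothetical example: for

	   _1 = &s.buf;
	   __builtin_memcpy (_1, src, len);

	 passing the ADDR_EXPR itself lets the expander see the alignment
	 of s.buf rather than that of an anonymous SSA pointer.  */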
2353      if (builtin_p
2354	  && TREE_CODE (arg) == SSA_NAME
2355	  && (def = get_gimple_for_ssa_name (arg))
2356	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2357	arg = gimple_assign_rhs1 (def);
2358      CALL_EXPR_ARG (exp, i) = arg;
2359    }
2360
2361  if (gimple_has_side_effects (stmt))
2362    TREE_SIDE_EFFECTS (exp) = 1;
2363
2364  if (gimple_call_nothrow_p (stmt))
2365    TREE_NOTHROW (exp) = 1;
2366
2367  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2368  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2369  if (decl
2370      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2371      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2372	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2373    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2374  else
2375    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2376  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2377  SET_EXPR_LOCATION (exp, gimple_location (stmt));
2378  CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2379
2380  /* Ensure RTL is created for debug args.  */
2381  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2382    {
2383      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2384      unsigned int ix;
2385      tree dtemp;
2386
2387      if (debug_args)
2388	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2389	  {
2390	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2391	    expand_debug_expr (dtemp);
2392	  }
2393    }
2394
2395  lhs = gimple_call_lhs (stmt);
2396  if (lhs)
2397    expand_assignment (lhs, exp, false);
2398  else
2399    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2400
2401  mark_transaction_restart_calls (stmt);
2402}
2403
2404
2405/* Generate RTL for an asm statement (explicit assembler code).
2406   STRING is a STRING_CST node containing the assembler code text,
2407   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2408   insn is volatile; don't optimize it.  */
2409
2410static void
2411expand_asm_loc (tree string, int vol, location_t locus)
2412{
2413  rtx body;
2414
2415  if (TREE_CODE (string) == ADDR_EXPR)
2416    string = TREE_OPERAND (string, 0);
2417
2418  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2419				ggc_strdup (TREE_STRING_POINTER (string)),
2420				locus);
2421
2422  MEM_VOLATILE_P (body) = vol;
2423
2424  emit_insn (body);
2425}
2426
2427/* Return the number of times character C occurs in string S.  */
2428static int
2429n_occurrences (int c, const char *s)
2430{
2431  int n = 0;
2432  while (*s)
2433    n += (*s++ == c);
2434  return n;
2435}
2436
2437/* A subroutine of expand_asm_operands.  Check that all operands have
2438   the same number of alternatives.  Return true if so.  */
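/* E.g. (hypothetical asm): in

     asm ("..." : "=r,m" (x) : "r,m" (y), "i,i" (42));

   every constraint string contains one ',', i.e. two alternatives,
   so the check succeeds.  */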
2439
2440static bool
2441check_operand_nalternatives (tree outputs, tree inputs)
2442{
2443  if (outputs || inputs)
2444    {
2445      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2446      int nalternatives
2447	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2448      tree next = inputs;
2449
2450      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2451	{
2452	  error ("too many alternatives in %<asm%>");
2453	  return false;
2454	}
2455
2456      tmp = outputs;
2457      while (tmp)
2458	{
2459	  const char *constraint
2460	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2461
2462	  if (n_occurrences (',', constraint) != nalternatives)
2463	    {
2464	      error ("operand constraints for %<asm%> differ "
2465		     "in number of alternatives");
2466	      return false;
2467	    }
2468
2469	  if (TREE_CHAIN (tmp))
2470	    tmp = TREE_CHAIN (tmp);
2471	  else
2472	    tmp = next, next = 0;
2473	}
2474    }
2475
2476  return true;
2477}
2478
2479/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true if a
   conflict is found, false for ok.  */
2482
2483static bool
2484tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2485{
2486  /* Conflicts between asm-declared register variables and the clobber
2487     list are not allowed.  */
2488  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2489
2490  if (overlap)
2491    {
2492      error ("asm-specifier for variable %qE conflicts with asm clobber list",
2493	     DECL_NAME (overlap));
2494
2495      /* Reset registerness to stop multiple errors emitted for a single
2496	 variable.  */
2497      DECL_REGISTER (overlap) = 0;
2498      return true;
2499    }
2500
2501  return false;
2502}
2503
2504/* Generate RTL for an asm statement with arguments.
2505   STRING is the instruction template.
2506   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2507   Each output or input has an expression in the TREE_VALUE and
2508   a tree list in TREE_PURPOSE which in turn contains a constraint
2509   name in TREE_VALUE (or NULL_TREE) and a constraint string
2510   in TREE_PURPOSE.
2511   CLOBBERS is a list of STRING_CST nodes each naming a hard register
2512   that is clobbered by this insn.
2513
2514   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2515   should be the fallthru basic block of the asm goto.
2516
2517   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2518   Some elements of OUTPUTS may be replaced with trees representing temporary
2519   values.  The caller should copy those temporary values to the originally
2520   specified lvalues.
2521
2522   VOL nonzero means the insn is volatile; don't optimize it.  */
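/* A minimal sketch of the source-level form handled here (hypothetical
   operands):

     asm volatile ("some-insn %1, %0"
		   : "=r" (result)
		   : "r" (input)
		   : "memory");

   where "=r" (result) populates OUTPUTS, "r" (input) populates INPUTS
   and "memory" populates CLOBBERS, each as a TREE_LIST as described
   above.  */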
2523
2524static void
2525expand_asm_operands (tree string, tree outputs, tree inputs,
2526		     tree clobbers, tree labels, basic_block fallthru_bb,
2527		     int vol, location_t locus)
2528{
2529  rtvec argvec, constraintvec, labelvec;
2530  rtx body;
2531  int ninputs = list_length (inputs);
2532  int noutputs = list_length (outputs);
2533  int nlabels = list_length (labels);
2534  int ninout;
2535  int nclobbers;
2536  HARD_REG_SET clobbered_regs;
2537  int clobber_conflict_found = 0;
2538  tree tail;
2539  tree t;
2540  int i;
2541  /* Vector of RTX's of evaluated output operands.  */
2542  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2543  int *inout_opnum = XALLOCAVEC (int, noutputs);
2544  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2545  machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2546  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2547  int old_generating_concat_p = generating_concat_p;
2548  rtx_code_label *fallthru_label = NULL;
2549
2550  /* An ASM with no outputs needs to be treated as volatile, for now.  */
2551  if (noutputs == 0)
2552    vol = 1;
2553
2554  if (! check_operand_nalternatives (outputs, inputs))
2555    return;
2556
2557  string = resolve_asm_operand_names (string, outputs, inputs, labels);
2558
2559  /* Collect constraints.  */
2560  i = 0;
2561  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2562    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2563  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2564    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2565
2566  /* Sometimes we wish to automatically clobber registers across an asm.
2567     Case in point is when the i386 backend moved from cc0 to a hard reg --
2568     maintaining source-level compatibility means automatically clobbering
2569     the flags register.  */
2570  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2571
2572  /* Count the number of meaningful clobbered registers, ignoring what
2573     we would ignore later.  */
2574  nclobbers = 0;
2575  CLEAR_HARD_REG_SET (clobbered_regs);
2576  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2577    {
2578      const char *regname;
2579      int nregs;
2580
2581      if (TREE_VALUE (tail) == error_mark_node)
2582	return;
2583      regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2584
2585      i = decode_reg_name_and_count (regname, &nregs);
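      /* decode_reg_name_and_count returns -4 for the special "memory"
	 clobber and -2 for an unrecognized name; -3 ("cc") is not
	 counted here and is skipped when the CLOBBER list is built
	 below.  */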
2586      if (i == -4)
2587	++nclobbers;
2588      else if (i == -2)
2589	error ("unknown register name %qs in %<asm%>", regname);
2590
2591      /* Mark clobbered registers.  */
2592      if (i >= 0)
2593        {
2594	  int reg;
2595
2596	  for (reg = i; reg < i + nregs; reg++)
2597	    {
2598	      ++nclobbers;
2599
2600	      /* Clobbering the PIC register is an error.  */
2601	      if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2602		{
2603		  error ("PIC register clobbered by %qs in %<asm%>", regname);
2604		  return;
2605		}
2606
2607	      SET_HARD_REG_BIT (clobbered_regs, reg);
2608	    }
2609	}
2610    }
2611
  /* First pass over inputs and outputs: check validity and call
     mark_addressable where needed.  */
2614
2615  ninout = 0;
2616  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2617    {
2618      tree val = TREE_VALUE (tail);
2619      tree type = TREE_TYPE (val);
2620      const char *constraint;
2621      bool is_inout;
2622      bool allows_reg;
2623      bool allows_mem;
2624
2625      /* If there's an erroneous arg, emit no insn.  */
2626      if (type == error_mark_node)
2627	return;
2628
2629      /* Try to parse the output constraint.  If that fails, there's
2630	 no point in going further.  */
2631      constraint = constraints[i];
2632      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2633				    &allows_mem, &allows_reg, &is_inout))
2634	return;
2635
2636      if (! allows_reg
2637	  && (allows_mem
2638	      || is_inout
2639	      || (DECL_P (val)
2640		  && REG_P (DECL_RTL (val))
2641		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2642	mark_addressable (val);
2643
2644      if (is_inout)
2645	ninout++;
2646    }
2647
2648  ninputs += ninout;
2649  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2650    {
2651      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2652      return;
2653    }
2654
2655  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2656    {
2657      bool allows_reg, allows_mem;
2658      const char *constraint;
2659
2660      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2661	 would get VOIDmode and that could cause a crash in reload.  */
2662      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2663	return;
2664
2665      constraint = constraints[i + noutputs];
2666      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2667				    constraints, &allows_mem, &allows_reg))
2668	return;
2669
2670      if (! allows_reg && allows_mem)
2671	mark_addressable (TREE_VALUE (tail));
2672    }
2673
2674  /* Second pass evaluates arguments.  */
2675
2676  /* Make sure stack is consistent for asm goto.  */
2677  if (nlabels > 0)
2678    do_pending_stack_adjust ();
2679
2680  ninout = 0;
2681  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2682    {
2683      tree val = TREE_VALUE (tail);
2684      tree type = TREE_TYPE (val);
2685      bool is_inout;
2686      bool allows_reg;
2687      bool allows_mem;
2688      rtx op;
2689      bool ok;
2690
2691      ok = parse_output_constraint (&constraints[i], i, ninputs,
2692				    noutputs, &allows_mem, &allows_reg,
2693				    &is_inout);
2694      gcc_assert (ok);
2695
2696      /* If an output operand is not a decl or indirect ref and our constraint
2697	 allows a register, make a temporary to act as an intermediate.
2698	 Make the asm insn write into that, then our caller will copy it to
2699	 the real output operand.  Likewise for promoted variables.  */
2700
2701      generating_concat_p = 0;
2702
2703      real_output_rtx[i] = NULL_RTX;
2704      if ((TREE_CODE (val) == INDIRECT_REF
2705	   && allows_mem)
2706	  || (DECL_P (val)
2707	      && (allows_mem || REG_P (DECL_RTL (val)))
2708	      && ! (REG_P (DECL_RTL (val))
2709		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2710	  || ! allows_reg
2711	  || is_inout)
2712	{
2713	  op = expand_expr (val, NULL_RTX, VOIDmode,
2714			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2715	  if (MEM_P (op))
2716	    op = validize_mem (op);
2717
2718	  if (! allows_reg && !MEM_P (op))
2719	    error ("output number %d not directly addressable", i);
2720	  if ((! allows_mem && MEM_P (op))
2721	      || GET_CODE (op) == CONCAT)
2722	    {
2723	      real_output_rtx[i] = op;
2724	      op = gen_reg_rtx (GET_MODE (op));
2725	      if (is_inout)
2726		emit_move_insn (op, real_output_rtx[i]);
2727	    }
2728	}
2729      else
2730	{
2731	  op = assign_temp (type, 0, 1);
2732	  op = validize_mem (op);
2733	  if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2734	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2735	  TREE_VALUE (tail) = make_tree (type, op);
2736	}
2737      output_rtx[i] = op;
2738
2739      generating_concat_p = old_generating_concat_p;
2740
2741      if (is_inout)
2742	{
2743	  inout_mode[ninout] = TYPE_MODE (type);
2744	  inout_opnum[ninout++] = i;
2745	}
2746
2747      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2748	clobber_conflict_found = 1;
2749    }
2750
2751  /* Make vectors for the expression-rtx, constraint strings,
2752     and named operands.  */
2753
2754  argvec = rtvec_alloc (ninputs);
2755  constraintvec = rtvec_alloc (ninputs);
2756  labelvec = rtvec_alloc (nlabels);
2757
2758  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2759				: GET_MODE (output_rtx[0])),
2760			       ggc_strdup (TREE_STRING_POINTER (string)),
2761			       empty_string, 0, argvec, constraintvec,
2762			       labelvec, locus);
2763
2764  MEM_VOLATILE_P (body) = vol;
2765
2766  /* Eval the inputs and put them into ARGVEC.
2767     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
2768
2769  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2770    {
2771      bool allows_reg, allows_mem;
2772      const char *constraint;
2773      tree val, type;
2774      rtx op;
2775      bool ok;
2776
2777      constraint = constraints[i + noutputs];
2778      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2779				   constraints, &allows_mem, &allows_reg);
2780      gcc_assert (ok);
2781
2782      generating_concat_p = 0;
2783
2784      val = TREE_VALUE (tail);
2785      type = TREE_TYPE (val);
2786      /* EXPAND_INITIALIZER will not generate code for valid initializer
2787	 constants, but will still generate code for other types of operand.
2788	 This is the behavior we want for constant constraints.  */
2789      op = expand_expr (val, NULL_RTX, VOIDmode,
2790			allows_reg ? EXPAND_NORMAL
2791			: allows_mem ? EXPAND_MEMORY
2792			: EXPAND_INITIALIZER);
2793
2794      /* Never pass a CONCAT to an ASM.  */
2795      if (GET_CODE (op) == CONCAT)
2796	op = force_reg (GET_MODE (op), op);
2797      else if (MEM_P (op))
2798	op = validize_mem (op);
2799
2800      if (asm_operand_ok (op, constraint, NULL) <= 0)
2801	{
2802	  if (allows_reg && TYPE_MODE (type) != BLKmode)
2803	    op = force_reg (TYPE_MODE (type), op);
2804	  else if (!allows_mem)
2805	    warning (0, "asm operand %d probably doesn%'t match constraints",
2806		     i + noutputs);
2807	  else if (MEM_P (op))
2808	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as a valid memory_operand at
		 this point.  Ignore it: clearly this *is* a memory.  */
2812	    }
2813	  else
2814	    gcc_unreachable ();
2815	}
2816
2817      generating_concat_p = old_generating_concat_p;
2818      ASM_OPERANDS_INPUT (body, i) = op;
2819
2820      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2821	= gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2822				 ggc_strdup (constraints[i + noutputs]),
2823				 locus);
2824
2825      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2826	clobber_conflict_found = 1;
2827    }
2828
2829  /* Protect all the operands from the queue now that they have all been
2830     evaluated.  */
2831
2832  generating_concat_p = 0;
2833
2834  /* For in-out operands, copy output rtx to input rtx.  */
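  /* E.g. an output written with a "+r" constraint is also fed back in as
     an input whose constraint is just the decimal number of that output
     operand, forcing both to use the same location.  */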
2835  for (i = 0; i < ninout; i++)
2836    {
2837      int j = inout_opnum[i];
2838      char buffer[16];
2839
2840      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2841	= output_rtx[j];
2842
2843      sprintf (buffer, "%d", j);
2844      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2845	= gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2846    }
2847
2848  /* Copy labels to the vector.  */
2849  for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2850    {
2851      rtx r;
2852      /* If asm goto has any labels in the fallthru basic block, use
2853	 a label that we emit immediately after the asm goto.  Expansion
2854	 may insert further instructions into the same basic block after
2855	 asm goto and if we don't do this, insertion of instructions on
2856	 the fallthru edge might misbehave.  See PR58670.  */
2857      if (fallthru_bb
2858	  && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2859	{
2860	  if (fallthru_label == NULL_RTX)
2861	    fallthru_label = gen_label_rtx ();
2862	  r = fallthru_label;
2863	}
2864      else
2865	r = label_rtx (TREE_VALUE (tail));
2866      ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2867    }
2868
2869  generating_concat_p = old_generating_concat_p;
2870
2871  /* Now, for each output, construct an rtx
2872     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2873			       ARGVEC CONSTRAINTS OPNAMES))
2874     If there is more than one, put them inside a PARALLEL.  */
2875
2876  if (nlabels > 0 && nclobbers == 0)
2877    {
2878      gcc_assert (noutputs == 0);
2879      emit_jump_insn (body);
2880    }
2881  else if (noutputs == 0 && nclobbers == 0)
2882    {
2883      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
2884      emit_insn (body);
2885    }
2886  else if (noutputs == 1 && nclobbers == 0)
2887    {
2888      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2889      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2890    }
2891  else
2892    {
2893      rtx obody = body;
2894      int num = noutputs;
2895
2896      if (num == 0)
2897	num = 1;
2898
2899      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2900
2901      /* For each output operand, store a SET.  */
2902      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2903	{
2904	  XVECEXP (body, 0, i)
2905	    = gen_rtx_SET (VOIDmode,
2906			   output_rtx[i],
2907			   gen_rtx_ASM_OPERANDS
2908			   (GET_MODE (output_rtx[i]),
2909			    ggc_strdup (TREE_STRING_POINTER (string)),
2910			    ggc_strdup (constraints[i]),
2911			    i, argvec, constraintvec, labelvec, locus));
2912
2913	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2914	}
2915
2916      /* If there are no outputs (but there are some clobbers)
2917	 store the bare ASM_OPERANDS into the PARALLEL.  */
2918
2919      if (i == 0)
2920	XVECEXP (body, 0, i++) = obody;
2921
2922      /* Store (clobber REG) for each clobbered register specified.  */
2923
2924      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2925	{
2926	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2927	  int reg, nregs;
2928	  int j = decode_reg_name_and_count (regname, &nregs);
2929	  rtx clobbered_reg;
2930
2931	  if (j < 0)
2932	    {
2933	      if (j == -3)	/* `cc', which is not a register */
2934		continue;
2935
2936	      if (j == -4)	/* `memory', don't cache memory across asm */
2937		{
2938		  XVECEXP (body, 0, i++)
2939		    = gen_rtx_CLOBBER (VOIDmode,
2940				       gen_rtx_MEM
2941				       (BLKmode,
2942					gen_rtx_SCRATCH (VOIDmode)));
2943		  continue;
2944		}
2945
2946	      /* Ignore unknown register, error already signaled.  */
2947	      continue;
2948	    }
2949
2950	  for (reg = j; reg < j + nregs; reg++)
2951	    {
	      /* Use QImode since that's guaranteed to clobber just
		 one reg.  */
2954	      clobbered_reg = gen_rtx_REG (QImode, reg);
2955
	      /* Sanity-check for overlap between this clobber and the
		 inputs or outputs that hasn't already been handled.
		 Such overlap should have been detected and reported
		 above.  */
2960	      if (!clobber_conflict_found)
2961		{
2962		  int opno;
2963
2964		  /* We test the old body (obody) contents to avoid
2965		     tripping over the under-construction body.  */
2966		  for (opno = 0; opno < noutputs; opno++)
2967		    if (reg_overlap_mentioned_p (clobbered_reg,
2968						 output_rtx[opno]))
2969		      internal_error
2970			("asm clobber conflict with output operand");
2971
2972		  for (opno = 0; opno < ninputs - ninout; opno++)
2973		    if (reg_overlap_mentioned_p (clobbered_reg,
2974						 ASM_OPERANDS_INPUT (obody,
2975								     opno)))
2976		      internal_error
2977			("asm clobber conflict with input operand");
2978		}
2979
2980	      XVECEXP (body, 0, i++)
2981		= gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2982	    }
2983	}
2984
2985      if (nlabels > 0)
2986	emit_jump_insn (body);
2987      else
2988	emit_insn (body);
2989    }
2990
2991  if (fallthru_label)
2992    emit_label (fallthru_label);
2993
2994  /* For any outputs that needed reloading into registers, spill them
2995     back to where they belong.  */
2996  for (i = 0; i < noutputs; ++i)
2997    if (real_output_rtx[i])
2998      emit_move_insn (real_output_rtx[i], output_rtx[i]);
2999
3000  crtl->has_asm_statement = 1;
3001  free_temp_slots ();
3002}
3003
3004
3005static void
3006expand_asm_stmt (gasm *stmt)
3007{
3008  int noutputs;
3009  tree outputs, tail, t;
3010  tree *o;
3011  size_t i, n;
3012  const char *s;
3013  tree str, out, in, cl, labels;
3014  location_t locus = gimple_location (stmt);
3015  basic_block fallthru_bb = NULL;
3016
3017  /* Meh... convert the gimple asm operands into real tree lists.
3018     Eventually we should make all routines work on the vectors instead
3019     of relying on TREE_CHAIN.  */
3020  out = NULL_TREE;
3021  n = gimple_asm_noutputs (stmt);
3022  if (n > 0)
3023    {
3024      t = out = gimple_asm_output_op (stmt, 0);
3025      for (i = 1; i < n; i++)
3026	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3027    }
3028
3029  in = NULL_TREE;
3030  n = gimple_asm_ninputs (stmt);
3031  if (n > 0)
3032    {
3033      t = in = gimple_asm_input_op (stmt, 0);
3034      for (i = 1; i < n; i++)
3035	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3036    }
3037
3038  cl = NULL_TREE;
3039  n = gimple_asm_nclobbers (stmt);
3040  if (n > 0)
3041    {
3042      t = cl = gimple_asm_clobber_op (stmt, 0);
3043      for (i = 1; i < n; i++)
3044	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3045    }
3046
3047  labels = NULL_TREE;
3048  n = gimple_asm_nlabels (stmt);
3049  if (n > 0)
3050    {
3051      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3052      if (fallthru)
3053	fallthru_bb = fallthru->dest;
3054      t = labels = gimple_asm_label_op (stmt, 0);
3055      for (i = 1; i < n; i++)
3056	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3057    }
3058
3059  s = gimple_asm_string (stmt);
3060  str = build_string (strlen (s), s);
3061
3062  if (gimple_asm_input_p (stmt))
3063    {
3064      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3065      return;
3066    }
3067
3068  outputs = out;
3069  noutputs = gimple_asm_noutputs (stmt);
  /* o[I] is the place where output number I should be written.  */
3071  o = (tree *) alloca (noutputs * sizeof (tree));
3072
3073  /* Record the contents of OUTPUTS before it is modified.  */
3074  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3075    o[i] = TREE_VALUE (tail);
3076
3077  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3078     OUTPUTS some trees for where the values were actually stored.  */
3079  expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3080		       gimple_asm_volatile_p (stmt), locus);
3081
3082  /* Copy all the intermediate outputs into the specified outputs.  */
3083  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3084    {
3085      if (o[i] != TREE_VALUE (tail))
3086	{
3087	  expand_assignment (o[i], TREE_VALUE (tail), false);
3088	  free_temp_slots ();
3089
3090	  /* Restore the original value so that it's correct the next
3091	     time we expand this function.  */
3092	  TREE_VALUE (tail) = o[i];
3093	}
3094    }
3095}
3096
3097/* Emit code to jump to the address
3098   specified by the pointer expression EXP.  */
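/* E.g. the GNU C labels-as-values extension (hypothetical labels):

     void *targets[] = { &&l1, &&l2 };
     goto *targets[i];

   Here EXP is the pointer expression "targets[i]".  */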
3099
3100static void
3101expand_computed_goto (tree exp)
3102{
3103  rtx x = expand_normal (exp);
3104
3105  do_pending_stack_adjust ();
3106  emit_indirect_jump (x);
3107}
3108
3109/* Generate RTL code for a `goto' statement with target label LABEL.
3110   LABEL should be a LABEL_DECL tree node that was or will later be
3111   defined with `expand_label'.  */
3112
3113static void
3114expand_goto (tree label)
3115{
3116#ifdef ENABLE_CHECKING
3117  /* Check for a nonlocal goto to a containing function.  Should have
3118     gotten translated to __builtin_nonlocal_goto.  */
3119  tree context = decl_function_context (label);
3120  gcc_assert (!context || context == current_function_decl);
3121#endif
3122
3123  emit_jump (label_rtx (label));
3124}
3125
3126/* Output a return with no value.  */
3127
3128static void
3129expand_null_return_1 (void)
3130{
3131  clear_pending_stack_adjust ();
3132  do_pending_stack_adjust ();
3133  emit_jump (return_label);
3134}
3135
3136/* Generate RTL to return from the current function, with no value.
3137   (That is, we do not do anything about returning any value.)  */
3138
3139void
3140expand_null_return (void)
3141{
  /* If this function was declared to return a value, but we are not
     returning one, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
3145  clobber_return_register ();
3146
3147  expand_null_return_1 ();
3148}
3149
3150/* Generate RTL to return from the current function, with value VAL.  */
3151
3152static void
3153expand_value_return (rtx val)
3154{
3155  /* Copy the value to the return location unless it's already there.  */
3156
3157  tree decl = DECL_RESULT (current_function_decl);
3158  rtx return_reg = DECL_RTL (decl);
3159  if (return_reg != val)
3160    {
3161      tree funtype = TREE_TYPE (current_function_decl);
3162      tree type = TREE_TYPE (decl);
3163      int unsignedp = TYPE_UNSIGNED (type);
3164      machine_mode old_mode = DECL_MODE (decl);
3165      machine_mode mode;
3166      if (DECL_BY_REFERENCE (decl))
3167        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3168      else
3169        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3170
3171      if (mode != old_mode)
3172	val = convert_modes (mode, old_mode, val, unsignedp);
3173
3174      if (GET_CODE (return_reg) == PARALLEL)
3175	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3176      else
3177	emit_move_insn (return_reg, val);
3178    }
3179
3180  expand_null_return_1 ();
3181}
3182
3183/* Generate RTL to evaluate the expression RETVAL and return it
3184   from the current function.  */
3185
3186static void
3187expand_return (tree retval, tree bounds)
3188{
3189  rtx result_rtl;
3190  rtx val = 0;
3191  tree retval_rhs;
3192  rtx bounds_rtl;
3193
  /* If the function wants no value, give it none.  */
3195  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3196    {
3197      expand_normal (retval);
3198      expand_null_return ();
3199      return;
3200    }
3201
3202  if (retval == error_mark_node)
3203    {
3204      /* Treat this like a return of no value from a function that
3205	 returns a value.  */
3206      expand_null_return ();
3207      return;
3208    }
3209  else if ((TREE_CODE (retval) == MODIFY_EXPR
3210	    || TREE_CODE (retval) == INIT_EXPR)
3211	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3212    retval_rhs = TREE_OPERAND (retval, 1);
3213  else
3214    retval_rhs = retval;
3215
3216  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3217
3218  /* Put returned bounds to the right place.  */
3219  bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3220  if (bounds_rtl)
3221    {
3222      rtx addr = NULL;
3223      rtx bnd = NULL;
3224
3225      if (bounds && bounds != error_mark_node)
3226	{
3227	  bnd = expand_normal (bounds);
3228	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3229	}
3230      else if (REG_P (bounds_rtl))
3231	{
3232	  if (bounds)
3233	    bnd = chkp_expand_zero_bounds ();
3234	  else
3235	    {
3236	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3237	      addr = gen_rtx_MEM (Pmode, addr);
3238	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3239	    }
3240
3241	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3242	}
3243      else
3244	{
3245	  int n;
3246
3247	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3248
3249	  if (bounds)
3250	    bnd = chkp_expand_zero_bounds ();
3251	  else
3252	    {
3253	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
3254	      addr = gen_rtx_MEM (Pmode, addr);
3255	    }
3256
3257	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3258	    {
3259	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3260	      if (!bounds)
3261		{
3262		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3263		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3264		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3265		}
3266	      targetm.calls.store_returned_bounds (slot, bnd);
3267	    }
3268	}
3269    }
3270  else if (chkp_function_instrumented_p (current_function_decl)
3271	   && !BOUNDED_P (retval_rhs)
3272	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3273	   && TREE_CODE (retval_rhs) != RESULT_DECL)
3274    {
3275      rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3276      addr = gen_rtx_MEM (Pmode, addr);
3277
3278      gcc_assert (MEM_P (result_rtl));
3279
3280      chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3281    }
3282
3283  /* If we are returning the RESULT_DECL, then the value has already
3284     been stored into it, so we don't have to do anything special.  */
3285  if (TREE_CODE (retval_rhs) == RESULT_DECL)
3286    expand_value_return (result_rtl);
3287
3288  /* If the result is an aggregate that is being returned in one (or more)
3289     registers, load the registers here.  */
3290
3291  else if (retval_rhs != 0
3292	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3293	   && REG_P (result_rtl))
3294    {
3295      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3296      if (val)
3297	{
3298	  /* Use the mode of the result value on the return register.  */
3299	  PUT_MODE (result_rtl, GET_MODE (val));
3300	  expand_value_return (val);
3301	}
3302      else
3303	expand_null_return ();
3304    }
3305  else if (retval_rhs != 0
3306	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3307	   && (REG_P (result_rtl)
3308	       || (GET_CODE (result_rtl) == PARALLEL)))
3309    {
3310      /* Compute the return value into a temporary (usually a pseudo reg).  */
3311      val
3312	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3313      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3314      val = force_not_mem (val);
3315      expand_value_return (val);
3316    }
3317  else
3318    {
3319      /* No hard reg used; calculate value into hard return reg.  */
3320      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3321      expand_value_return (result_rtl);
3322    }
3323}
3324
3325/* A subroutine of expand_gimple_stmt, expanding one gimple statement
3326   STMT that doesn't require special handling for outgoing edges.  That
3327   is no tailcalls and no GIMPLE_COND.  */
3328
3329static void
3330expand_gimple_stmt_1 (gimple stmt)
3331{
3332  tree op0;
3333
3334  set_curr_insn_location (gimple_location (stmt));
3335
3336  switch (gimple_code (stmt))
3337    {
3338    case GIMPLE_GOTO:
3339      op0 = gimple_goto_dest (stmt);
3340      if (TREE_CODE (op0) == LABEL_DECL)
3341	expand_goto (op0);
3342      else
3343	expand_computed_goto (op0);
3344      break;
3345    case GIMPLE_LABEL:
3346      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3347      break;
3348    case GIMPLE_NOP:
3349    case GIMPLE_PREDICT:
3350      break;
3351    case GIMPLE_SWITCH:
3352      expand_case (as_a <gswitch *> (stmt));
3353      break;
3354    case GIMPLE_ASM:
3355      expand_asm_stmt (as_a <gasm *> (stmt));
3356      break;
3357    case GIMPLE_CALL:
3358      expand_call_stmt (as_a <gcall *> (stmt));
3359      break;
3360
3361    case GIMPLE_RETURN:
3362      {
3363	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3364	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3365
3366	if (op0 && op0 != error_mark_node)
3367	  {
3368	    tree result = DECL_RESULT (current_function_decl);
3369
	    /* Mark that we have a return statement with missing bounds.  */
3371	    if (!bnd
3372		&& chkp_function_instrumented_p (cfun->decl)
3373		&& !DECL_P (op0))
3374	      bnd = error_mark_node;
3375
3376	    /* If we are not returning the current function's RESULT_DECL,
3377	       build an assignment to it.  */
3378	    if (op0 != result)
3379	      {
3380		/* I believe that a function's RESULT_DECL is unique.  */
3381		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3382
3383		/* ??? We'd like to use simply expand_assignment here,
3384		   but this fails if the value is of BLKmode but the return
3385		   decl is a register.  expand_return has special handling
3386		   for this combination, which eventually should move
3387		   to common code.  See comments there.  Until then, let's
3388		   build a modify expression :-/  */
3389		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3390			      result, op0);
3391	      }
3392	  }
3393
3394	if (!op0)
3395	  expand_null_return ();
3396	else
3397	  expand_return (op0, bnd);
3398      }
3399      break;
3400
3401    case GIMPLE_ASSIGN:
3402      {
3403	gassign *assign_stmt = as_a <gassign *> (stmt);
3404	tree lhs = gimple_assign_lhs (assign_stmt);
3405
	/* Tree expand used to fiddle with |= and &= of two bitfield
	   COMPONENT_REFs here.  This can't happen with gimple, as the LHS
	   of binary assigns must be a gimple reg.  */
3409
3410	if (TREE_CODE (lhs) != SSA_NAME
3411	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3412	       == GIMPLE_SINGLE_RHS)
3413	  {
3414	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3415	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3416			== GIMPLE_SINGLE_RHS);
3417	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3418	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3419	    if (TREE_CLOBBER_P (rhs))
3420	      /* This is a clobber marking that this LHS is going out of
3421		 scope.  */
3422	      ;
3423	    else
3424	      expand_assignment (lhs, rhs,
3425				 gimple_assign_nontemporal_move_p (
3426				   assign_stmt));
3427	  }
3428	else
3429	  {
3430	    rtx target, temp;
3431	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3432	    struct separate_ops ops;
3433	    bool promoted = false;
3434
3435	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3436	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3437	      promoted = true;
3438
3439	    ops.code = gimple_assign_rhs_code (assign_stmt);
3440	    ops.type = TREE_TYPE (lhs);
3441	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3442	      {
3443		case GIMPLE_TERNARY_RHS:
3444		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3445		  /* Fallthru */
3446		case GIMPLE_BINARY_RHS:
3447		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3448		  /* Fallthru */
3449		case GIMPLE_UNARY_RHS:
3450		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3451		  break;
3452		default:
3453		  gcc_unreachable ();
3454	      }
3455	    ops.location = gimple_location (stmt);
3456
3457	    /* If we want to use a nontemporal store, force the value into
3458	       a register first.  If we store into a promoted register,
3459	       don't expand directly to the target.  */
3460	    temp = nontemporal || promoted ? NULL_RTX : target;
3461	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3462				       EXPAND_NORMAL);
3463
3464	    if (temp == target)
3465	      ;
3466	    else if (promoted)
3467	      {
3468		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3469		/* If TEMP is a VOIDmode constant, use convert_modes to make
3470		   sure that we properly convert it.  */
3471		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3472		  {
3473		    temp = convert_modes (GET_MODE (target),
3474					  TYPE_MODE (ops.type),
3475					  temp, unsignedp);
3476		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3477					  GET_MODE (target), temp, unsignedp);
3478		  }
3479
3480		convert_move (SUBREG_REG (target), temp, unsignedp);
3481	      }
3482	    else if (nontemporal && emit_storent_insn (target, temp))
3483	      ;
3484	    else
3485	      {
3486		temp = force_operand (temp, target);
3487		if (temp != target)
3488		  emit_move_insn (target, temp);
3489	      }
3490	  }
3491      }
3492      break;
3493
3494    default:
3495      gcc_unreachable ();
3496    }
3497}
3498
3499/* Expand one gimple statement STMT and return the last RTL instruction
3500   before any of the newly generated ones.
3501
3502   In addition to generating the necessary RTL instructions this also
3503   sets REG_EH_REGION notes if necessary and sets the current source
3504   location for diagnostics.  */
3505
3506static rtx_insn *
3507expand_gimple_stmt (gimple stmt)
3508{
3509  location_t saved_location = input_location;
3510  rtx_insn *last = get_last_insn ();
3511  int lp_nr;
3512
3513  gcc_assert (cfun);
3514
3515  /* We need to save and restore the current source location so that errors
3516     discovered during expansion are emitted with the right location.  But
3517     it would be better if the diagnostic routines used the source location
3518     embedded in the tree nodes rather than globals.  */
3519  if (gimple_has_location (stmt))
3520    input_location = gimple_location (stmt);
3521
3522  expand_gimple_stmt_1 (stmt);
3523
3524  /* Free any temporaries used to evaluate this statement.  */
3525  free_temp_slots ();
3526
3527  input_location = saved_location;
3528
3529  /* Mark all insns that may trap.  */
3530  lp_nr = lookup_stmt_eh_lp (stmt);
3531  if (lp_nr)
3532    {
3533      rtx_insn *insn;
3534      for (insn = next_real_insn (last); insn;
3535	   insn = next_real_insn (insn))
3536	{
3537	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3538	      /* If we want exceptions for non-call insns, any
3539		 may_trap_p instruction may throw.  */
3540	      && GET_CODE (PATTERN (insn)) != CLOBBER
3541	      && GET_CODE (PATTERN (insn)) != USE
3542	      && insn_could_throw_p (insn))
3543	    make_reg_eh_region_note (insn, 0, lp_nr);
3544	}
3545    }
3546
3547  return last;
3548}
3549
3550/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3551   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3552   generated a tail call (something that might be denied by the ABI
3553   rules governing the call; see calls.c).
3554
3555   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3556   can still reach the rest of BB.  The case here is __builtin_sqrt,
3557   where the NaN result goes through the external function (with a
3558   tailcall) and the normal result happens via a sqrt instruction.  */
3559
3560static basic_block
3561expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3562{
3563  rtx_insn *last2, *last;
3564  edge e;
3565  edge_iterator ei;
3566  int probability;
3567  gcov_type count;
3568
3569  last2 = last = expand_gimple_stmt (stmt);
3570
3571  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3572    if (CALL_P (last) && SIBLING_CALL_P (last))
3573      goto found;
3574
3575  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3576
3577  *can_fallthru = true;
3578  return NULL;
3579
3580 found:
3581  /* ??? Wouldn't it be better to just reset any pending stack adjust?
3582     Any instructions emitted here are about to be deleted.  */
3583  do_pending_stack_adjust ();
3584
3585  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3586  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3587     EH or abnormal edges, we shouldn't have created a tail call in
3588     the first place.  So it seems to me we should just be removing
3589     all edges here, or redirecting the existing fallthru edge to
3590     the exit block.  */
3591
3592  probability = 0;
3593  count = 0;
3594
3595  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3596    {
3597      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3598	{
3599	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3600	    {
3601	      e->dest->count -= e->count;
3602	      e->dest->frequency -= EDGE_FREQUENCY (e);
3603	      if (e->dest->count < 0)
3604		e->dest->count = 0;
3605	      if (e->dest->frequency < 0)
3606		e->dest->frequency = 0;
3607	    }
3608	  count += e->count;
3609	  probability += e->probability;
3610	  remove_edge (e);
3611	}
3612      else
3613	ei_next (&ei);
3614    }
3615
3616  /* This is somewhat ugly: the call_expr expander often emits instructions
3617     after the sibcall (to perform the function return).  These confuse the
3618     find_many_sub_basic_blocks code, so we need to get rid of them.  */
3619  last = NEXT_INSN (last);
3620  gcc_assert (BARRIER_P (last));
3621
3622  *can_fallthru = false;
3623  while (NEXT_INSN (last))
3624    {
3625      /* For instance, the sqrt builtin expander expands an if-then-else
3626	 with a sibcall in the `then' arm and a label for the `else' arm.  */
3627      if (LABEL_P (NEXT_INSN (last)))
3628	{
3629	  *can_fallthru = true;
3630	  break;
3631	}
3632      delete_insn (NEXT_INSN (last));
3633    }
3634
3635  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3636		 | EDGE_SIBCALL);
3637  e->probability += probability;
3638  e->count += count;
3639  BB_END (bb) = last;
3640  update_bb_for_insn (bb);
3641
3642  if (NEXT_INSN (last))
3643    {
3644      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3645
3646      last = BB_END (bb);
3647      if (BARRIER_P (last))
3648	BB_END (bb) = PREV_INSN (last);
3649    }
3650
3651  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3652
3653  return bb;
3654}
3655
3656/* Return the difference between the floor and the truncated result of
3657   a signed division by OP1 with remainder MOD.  */
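/* For example, -7 / 2 truncates to -3 with remainder -1, while the floor
   is -4: MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment is -1.  */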
3658static rtx
3659floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3660{
3661  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3662  return gen_rtx_IF_THEN_ELSE
3663    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3664     gen_rtx_IF_THEN_ELSE
3665     (mode, gen_rtx_LT (BImode,
3666			gen_rtx_DIV (mode, op1, mod),
3667			const0_rtx),
3668      constm1_rtx, const0_rtx),
3669     const0_rtx);
3670}
3671
3672/* Return the difference between the ceil and the truncated result of
3673   a signed division by OP1 with remainder MOD.  */
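/* For example, 7 / 2 truncates to 3 with remainder 1, while the ceiling
   is 4: MOD != 0 and OP1 / MOD = 2 / 1 > 0, so the adjustment is 1.  */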
3674static rtx
3675ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3676{
3677  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3678  return gen_rtx_IF_THEN_ELSE
3679    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3680     gen_rtx_IF_THEN_ELSE
3681     (mode, gen_rtx_GT (BImode,
3682			gen_rtx_DIV (mode, op1, mod),
3683			const0_rtx),
3684      const1_rtx, const0_rtx),
3685     const0_rtx);
3686}
3687
3688/* Return the difference between the ceil and the truncated result of
3689   an unsigned division by OP1 with remainder MOD.  */
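/* For example, 7 / 2 truncates to 3 with remainder 1, while the ceiling
   is 4: for unsigned division the adjustment is 1 whenever MOD != 0.  */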
3690static rtx
3691ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3692{
3693  /* (mod != 0 ? 1 : 0) */
3694  return gen_rtx_IF_THEN_ELSE
3695    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3696     const1_rtx, const0_rtx);
3697}
3698
3699/* Return the difference between the rounded and the truncated result
3700   of a signed division by OP1 with remainder MOD.  Halfway cases are
3701   rounded away from zero, rather than to the nearest even number.  */
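/* For example, -7 / 2 truncates to -3 with remainder -1: |MOD| = 1 is not
   less than |OP1| - |MOD| = 1 and OP1 / MOD < 0, so the adjustment is -1,
   giving the rounded result -4.  */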
3702static rtx
3703round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3704{
3705  /* (abs (mod) >= abs (op1) - abs (mod)
3706      ? (op1 / mod > 0 ? 1 : -1)
3707      : 0) */
3708  return gen_rtx_IF_THEN_ELSE
3709    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3710		       gen_rtx_MINUS (mode,
3711				      gen_rtx_ABS (mode, op1),
3712				      gen_rtx_ABS (mode, mod))),
3713     gen_rtx_IF_THEN_ELSE
3714     (mode, gen_rtx_GT (BImode,
3715			gen_rtx_DIV (mode, op1, mod),
3716			const0_rtx),
3717      const1_rtx, constm1_rtx),
3718     const0_rtx);
3719}
3720
3721/* Return the difference between the rounded and the truncated result
3722   of an unsigned division by OP1 with remainder MOD.  Halfway cases
3723   are rounded away from zero, rather than to the nearest even
3724   number.  */
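/* For example, 7 / 2 truncates to 3 with remainder 1: MOD = 1 is not less
   than OP1 - MOD = 1, so the adjustment is 1, giving the rounded result 4.  */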
3725static rtx
3726round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3727{
3728  /* (mod >= op1 - mod ? 1 : 0) */
3729  return gen_rtx_IF_THEN_ELSE
3730    (mode, gen_rtx_GE (BImode, mod,
3731		       gen_rtx_MINUS (mode, op1, mod)),
3732     const1_rtx, const0_rtx);
3733}
3734
3735/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3736   any rtl.  */
3737
3738static rtx
3739convert_debug_memory_address (machine_mode mode, rtx x,
3740			      addr_space_t as)
3741{
3742  machine_mode xmode = GET_MODE (x);
3743
3744#ifndef POINTERS_EXTEND_UNSIGNED
3745  gcc_assert (mode == Pmode
3746	      || mode == targetm.addr_space.address_mode (as));
3747  gcc_assert (xmode == mode || xmode == VOIDmode);
3748#else
3749  rtx temp;
3750
3751  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3752
3753  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3754    return x;
3755
3756  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3757    x = simplify_gen_subreg (mode, x, xmode,
3758			     subreg_lowpart_offset
3759			     (mode, xmode));
3760  else if (POINTERS_EXTEND_UNSIGNED > 0)
3761    x = gen_rtx_ZERO_EXTEND (mode, x);
3762  else if (!POINTERS_EXTEND_UNSIGNED)
3763    x = gen_rtx_SIGN_EXTEND (mode, x);
3764  else
3765    {
3766      switch (GET_CODE (x))
3767	{
3768	case SUBREG:
3769	  if ((SUBREG_PROMOTED_VAR_P (x)
3770	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3771	       || (GET_CODE (SUBREG_REG (x)) == PLUS
3772		   && REG_P (XEXP (SUBREG_REG (x), 0))
3773		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3774		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3775	      && GET_MODE (SUBREG_REG (x)) == mode)
3776	    return SUBREG_REG (x);
3777	  break;
3778	case LABEL_REF:
3779	  temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3780	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3781	  return temp;
3782	case SYMBOL_REF:
3783	  temp = shallow_copy_rtx (x);
3784	  PUT_MODE (temp, mode);
3785	  return temp;
3786	case CONST:
3787	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3788	  if (temp)
3789	    temp = gen_rtx_CONST (mode, temp);
3790	  return temp;
3791	case PLUS:
3792	case MINUS:
3793	  if (CONST_INT_P (XEXP (x, 1)))
3794	    {
3795	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3796	      if (temp)
3797		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3798	    }
3799	  break;
3800	default:
3801	  break;
3802	}
3803      /* Don't know how to express ptr_extend as operation in debug info.  */
3804      return NULL;
3805    }
3806#endif /* POINTERS_EXTEND_UNSIGNED */
3807
3808  return x;
3809}
3810
3811/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3812   by avoid_deep_ter_for_debug.  */
3813
3814static hash_map<tree, tree> *deep_ter_debug_map;
3815
3816/* Split overly deep TER chains for debug stmts using debug temporaries.  */
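/* A use whose TER-substituted definition would nest deeper than six
   statements is instead bound to a new DEBUG_EXPR_DECL emitted right after
   its defining statement, and the mapping is recorded in
   deep_ter_debug_map.  */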
3817
3818static void
3819avoid_deep_ter_for_debug (gimple stmt, int depth)
3820{
3821  use_operand_p use_p;
3822  ssa_op_iter iter;
3823  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3824    {
3825      tree use = USE_FROM_PTR (use_p);
3826      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3827	continue;
3828      gimple g = get_gimple_for_ssa_name (use);
3829      if (g == NULL)
3830	continue;
3831      if (depth > 6 && !stmt_ends_bb_p (g))
3832	{
3833	  if (deep_ter_debug_map == NULL)
3834	    deep_ter_debug_map = new hash_map<tree, tree>;
3835
3836	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3837	  if (vexpr != NULL)
3838	    continue;
3839	  vexpr = make_node (DEBUG_EXPR_DECL);
3840	  gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3841	  DECL_ARTIFICIAL (vexpr) = 1;
3842	  TREE_TYPE (vexpr) = TREE_TYPE (use);
3843	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3844	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
3845	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3846	  avoid_deep_ter_for_debug (def_temp, 0);
3847	}
3848      else
3849	avoid_deep_ter_for_debug (g, depth + 1);
3850    }
3851}
3852
3853/* Return an RTX equivalent to the value of the parameter DECL.  */
3854
3855static rtx
3856expand_debug_parm_decl (tree decl)
3857{
3858  rtx incoming = DECL_INCOMING_RTL (decl);
3859
3860  if (incoming
3861      && GET_MODE (incoming) != BLKmode
3862      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3863	  || (MEM_P (incoming)
3864	      && REG_P (XEXP (incoming, 0))
3865	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
3866    {
3867      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3868
3869#ifdef HAVE_window_save
3870      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3871	 If the target machine has an explicit window save instruction, the
3872	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
3873      if (REG_P (incoming)
3874	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3875	incoming
3876	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3877				OUTGOING_REGNO (REGNO (incoming)), 0);
3878      else if (MEM_P (incoming))
3879	{
3880	  rtx reg = XEXP (incoming, 0);
3881	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3882	    {
3883	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3884	      incoming = replace_equiv_address_nv (incoming, reg);
3885	    }
3886	  else
3887	    incoming = copy_rtx (incoming);
3888	}
3889#endif
3890
3891      ENTRY_VALUE_EXP (rtl) = incoming;
3892      return rtl;
3893    }
3894
3895  if (incoming
3896      && GET_MODE (incoming) != BLKmode
3897      && !TREE_ADDRESSABLE (decl)
3898      && MEM_P (incoming)
3899      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3900	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
3901	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3902	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3903    return copy_rtx (incoming);
3904
3905  return NULL_RTX;
3906}
3907
3908/* Return an RTX equivalent to the value of the tree expression EXP.  */
3909
3910static rtx
3911expand_debug_expr (tree exp)
3912{
3913  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3914  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3915  machine_mode inner_mode = VOIDmode;
3916  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3917  addr_space_t as;
3918
3919  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3920    {
3921    case tcc_expression:
3922      switch (TREE_CODE (exp))
3923	{
3924	case COND_EXPR:
3925	case DOT_PROD_EXPR:
3926	case SAD_EXPR:
3927	case WIDEN_MULT_PLUS_EXPR:
3928	case WIDEN_MULT_MINUS_EXPR:
3929	case FMA_EXPR:
3930	  goto ternary;
3931
3932	case TRUTH_ANDIF_EXPR:
3933	case TRUTH_ORIF_EXPR:
3934	case TRUTH_AND_EXPR:
3935	case TRUTH_OR_EXPR:
3936	case TRUTH_XOR_EXPR:
3937	  goto binary;
3938
3939	case TRUTH_NOT_EXPR:
3940	  goto unary;
3941
3942	default:
3943	  break;
3944	}
3945      break;
3946
3947    ternary:
3948      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3949      if (!op2)
3950	return NULL_RTX;
3951      /* Fall through.  */
3952
3953    binary:
3954    case tcc_binary:
3955      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3956      if (!op1)
3957	return NULL_RTX;
3958      switch (TREE_CODE (exp))
3959	{
3960	case LSHIFT_EXPR:
3961	case RSHIFT_EXPR:
3962	case LROTATE_EXPR:
3963	case RROTATE_EXPR:
3964	case WIDEN_LSHIFT_EXPR:
3965	  /* Ensure second operand isn't wider than the first one.  */
3966	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3967	  if (SCALAR_INT_MODE_P (inner_mode))
3968	    {
3969	      machine_mode opmode = mode;
3970	      if (VECTOR_MODE_P (mode))
3971		opmode = GET_MODE_INNER (mode);
3972	      if (SCALAR_INT_MODE_P (opmode)
3973		  && (GET_MODE_PRECISION (opmode)
3974		      < GET_MODE_PRECISION (inner_mode)))
3975		op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3976					   subreg_lowpart_offset (opmode,
3977								  inner_mode));
3978	    }
3979	  break;
3980	default:
3981	  break;
3982	}
3983      /* Fall through.  */
3984
3985    unary:
3986    case tcc_unary:
3987      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3988      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3989      if (!op0)
3990	return NULL_RTX;
3991      break;
3992
3993    case tcc_comparison:
3994      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3995      goto binary;
3996
3997    case tcc_type:
3998    case tcc_statement:
3999      gcc_unreachable ();
4000
4001    case tcc_constant:
4002    case tcc_exceptional:
4003    case tcc_declaration:
4004    case tcc_reference:
4005    case tcc_vl_exp:
4006      break;
4007    }
4008
4009  switch (TREE_CODE (exp))
4010    {
4011    case STRING_CST:
4012      if (!lookup_constant_def (exp))
4013	{
4014	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4015	      != (size_t) TREE_STRING_LENGTH (exp))
4016	    return NULL_RTX;
4017	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4018	  op0 = gen_rtx_MEM (BLKmode, op0);
4019	  set_mem_attributes (op0, exp, 0);
4020	  return op0;
4021	}
4022      /* Fall through...  */
4023
4024    case INTEGER_CST:
4025    case REAL_CST:
4026    case FIXED_CST:
4027      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4028      return op0;
4029
4030    case COMPLEX_CST:
4031      gcc_assert (COMPLEX_MODE_P (mode));
4032      op0 = expand_debug_expr (TREE_REALPART (exp));
4033      op1 = expand_debug_expr (TREE_IMAGPART (exp));
4034      return gen_rtx_CONCAT (mode, op0, op1);
4035
4036    case DEBUG_EXPR_DECL:
4037      op0 = DECL_RTL_IF_SET (exp);
4038
4039      if (op0)
4040	return op0;
4041
4042      op0 = gen_rtx_DEBUG_EXPR (mode);
4043      DEBUG_EXPR_TREE_DECL (op0) = exp;
4044      SET_DECL_RTL (exp, op0);
4045
4046      return op0;
4047
4048    case VAR_DECL:
4049    case PARM_DECL:
4050    case FUNCTION_DECL:
4051    case LABEL_DECL:
4052    case CONST_DECL:
4053    case RESULT_DECL:
4054      op0 = DECL_RTL_IF_SET (exp);
4055
4056      /* This decl was probably optimized away.  */
4057      if (!op0)
4058	{
4059	  if (TREE_CODE (exp) != VAR_DECL
4060	      || DECL_EXTERNAL (exp)
4061	      || !TREE_STATIC (exp)
4062	      || !DECL_NAME (exp)
4063	      || DECL_HARD_REGISTER (exp)
4064	      || DECL_IN_CONSTANT_POOL (exp)
4065	      || mode == VOIDmode)
4066	    return NULL;
4067
4068	  op0 = make_decl_rtl_for_debug (exp);
4069	  if (!MEM_P (op0)
4070	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4071	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4072	    return NULL;
4073	}
4074      else
4075	op0 = copy_rtx (op0);
4076
4077      if (GET_MODE (op0) == BLKmode
4078	  /* If op0 is not BLKmode, but mode is, adjust_mode
4079	     below would ICE.  While it is likely a FE bug,
4080	     try to be robust here.  See PR43166.  */
4081	  || mode == BLKmode
4082	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4083	{
4084	  gcc_assert (MEM_P (op0));
4085	  op0 = adjust_address_nv (op0, mode, 0);
4086	  return op0;
4087	}
4088
4089      /* Fall through.  */
4090
4091    adjust_mode:
4092    case PAREN_EXPR:
4093    CASE_CONVERT:
4094      {
4095	inner_mode = GET_MODE (op0);
4096
4097	if (mode == inner_mode)
4098	  return op0;
4099
4100	if (inner_mode == VOIDmode)
4101	  {
4102	    if (TREE_CODE (exp) == SSA_NAME)
4103	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4104	    else
4105	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4106	    if (mode == inner_mode)
4107	      return op0;
4108	  }
4109
4110	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4111	  {
4112	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4113	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4114	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4115	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4116	    else
4117	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4118	  }
4119	else if (FLOAT_MODE_P (mode))
4120	  {
4121	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4122	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4123	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4124	    else
4125	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4126	  }
4127	else if (FLOAT_MODE_P (inner_mode))
4128	  {
4129	    if (unsignedp)
4130	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4131	    else
4132	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4133	  }
4134	else if (CONSTANT_P (op0)
4135		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4136	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
4137				     subreg_lowpart_offset (mode,
4138							    inner_mode));
4139	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4140		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4141		 : unsignedp)
4142	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4143	else
4144	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4145
4146	return op0;
4147      }
4148
4149    case MEM_REF:
4150      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4151	{
4152	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4153				     TREE_OPERAND (exp, 0),
4154				     TREE_OPERAND (exp, 1));
4155	  if (newexp)
4156	    return expand_debug_expr (newexp);
4157	}
4158      /* FALLTHROUGH */
4159    case INDIRECT_REF:
4160      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4161      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4162      if (!op0)
4163	return NULL;
4164
4165      if (TREE_CODE (exp) == MEM_REF)
4166	{
4167	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4168	      || (GET_CODE (op0) == PLUS
4169		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4170	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4171	       Instead just use get_inner_reference.  */
4172	    goto component_ref;
4173
4174	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4175	  if (!op1 || !CONST_INT_P (op1))
4176	    return NULL;
4177
4178	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4179	}
4180
4181      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4182
4183      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4184					  op0, as);
4185      if (op0 == NULL_RTX)
4186	return NULL;
4187
4188      op0 = gen_rtx_MEM (mode, op0);
4189      set_mem_attributes (op0, exp, 0);
4190      if (TREE_CODE (exp) == MEM_REF
4191	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4192	set_mem_expr (op0, NULL_TREE);
4193      set_mem_addr_space (op0, as);
4194
4195      return op0;
4196
4197    case TARGET_MEM_REF:
4198      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4199	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4200	return NULL;
4201
4202      op0 = expand_debug_expr
4203	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4204      if (!op0)
4205	return NULL;
4206
4207      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4208      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4209					  op0, as);
4210      if (op0 == NULL_RTX)
4211	return NULL;
4212
4213      op0 = gen_rtx_MEM (mode, op0);
4214
4215      set_mem_attributes (op0, exp, 0);
4216      set_mem_addr_space (op0, as);
4217
4218      return op0;
4219
4220    component_ref:
4221    case ARRAY_REF:
4222    case ARRAY_RANGE_REF:
4223    case COMPONENT_REF:
4224    case BIT_FIELD_REF:
4225    case REALPART_EXPR:
4226    case IMAGPART_EXPR:
4227    case VIEW_CONVERT_EXPR:
4228      {
4229	machine_mode mode1;
4230	HOST_WIDE_INT bitsize, bitpos;
4231	tree offset;
4232	int volatilep = 0;
4233	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4234					&mode1, &unsignedp, &volatilep, false);
4235	rtx orig_op0;
4236
4237	if (bitsize == 0)
4238	  return NULL;
4239
4240	orig_op0 = op0 = expand_debug_expr (tem);
4241
4242	if (!op0)
4243	  return NULL;
4244
4245	if (offset)
4246	  {
4247	    machine_mode addrmode, offmode;
4248
4249	    if (!MEM_P (op0))
4250	      return NULL;
4251
4252	    op0 = XEXP (op0, 0);
4253	    addrmode = GET_MODE (op0);
4254	    if (addrmode == VOIDmode)
4255	      addrmode = Pmode;
4256
4257	    op1 = expand_debug_expr (offset);
4258	    if (!op1)
4259	      return NULL;
4260
4261	    offmode = GET_MODE (op1);
4262	    if (offmode == VOIDmode)
4263	      offmode = TYPE_MODE (TREE_TYPE (offset));
4264
4265	    if (addrmode != offmode)
4266	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
4267					 subreg_lowpart_offset (addrmode,
4268								offmode));
4269
4270	    /* Don't use offset_address here; we don't need a
4271	       recognizable address, and we don't want to generate
4272	       code.  */
4273	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4274							  op0, op1));
4275	  }
4276
4277	if (MEM_P (op0))
4278	  {
4279	    if (mode1 == VOIDmode)
4280	      /* Bitfield.  */
4281	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4282	    if (bitpos >= BITS_PER_UNIT)
4283	      {
4284		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4285		bitpos %= BITS_PER_UNIT;
4286	      }
4287	    else if (bitpos < 0)
4288	      {
4289		HOST_WIDE_INT units
4290		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4291		op0 = adjust_address_nv (op0, mode1, units);
4292		bitpos += units * BITS_PER_UNIT;
4293	      }
4294	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4295	      op0 = adjust_address_nv (op0, mode, 0);
4296	    else if (GET_MODE (op0) != mode1)
4297	      op0 = adjust_address_nv (op0, mode1, 0);
4298	    else
4299	      op0 = copy_rtx (op0);
4300	    if (op0 == orig_op0)
4301	      op0 = shallow_copy_rtx (op0);
4302	    set_mem_attributes (op0, exp, 0);
4303	  }
4304
4305	if (bitpos == 0 && mode == GET_MODE (op0))
4306	  return op0;
4307
4308        if (bitpos < 0)
4309          return NULL;
4310
4311	if (GET_MODE (op0) == BLKmode)
4312	  return NULL;
4313
4314	if ((bitpos % BITS_PER_UNIT) == 0
4315	    && bitsize == GET_MODE_BITSIZE (mode1))
4316	  {
4317	    machine_mode opmode = GET_MODE (op0);
4318
4319	    if (opmode == VOIDmode)
4320	      opmode = TYPE_MODE (TREE_TYPE (tem));
4321
4322	    /* This condition may hold if we're expanding the address
4323	       right past the end of an array that turned out not to
4324	       be addressable (i.e., the address was only computed in
4325	       debug stmts).  The gen_subreg below would rightfully
4326	       crash, and the address doesn't really exist, so just
4327	       drop it.  */
4328	    if (bitpos >= GET_MODE_BITSIZE (opmode))
4329	      return NULL;
4330
4331	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4332	      return simplify_gen_subreg (mode, op0, opmode,
4333					  bitpos / BITS_PER_UNIT);
4334	  }
4335
4336	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4337				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4338				     ? SIGN_EXTRACT
4339				     : ZERO_EXTRACT, mode,
4340				     GET_MODE (op0) != VOIDmode
4341				     ? GET_MODE (op0)
4342				     : TYPE_MODE (TREE_TYPE (tem)),
4343				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
4344      }
4345
4346    case ABS_EXPR:
4347      return simplify_gen_unary (ABS, mode, op0, mode);
4348
4349    case NEGATE_EXPR:
4350      return simplify_gen_unary (NEG, mode, op0, mode);
4351
4352    case BIT_NOT_EXPR:
4353      return simplify_gen_unary (NOT, mode, op0, mode);
4354
4355    case FLOAT_EXPR:
4356      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4357									 0)))
4358				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4359				 inner_mode);
4360
4361    case FIX_TRUNC_EXPR:
4362      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4363				 inner_mode);
4364
4365    case POINTER_PLUS_EXPR:
4366      /* For the rare target where pointers are not the same size as
4367	 size_t, we need to check for mis-matched modes and correct
4368	 the addend.  */
4369      if (op0 && op1
4370	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4371	  && GET_MODE (op0) != GET_MODE (op1))
4372	{
4373	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4374	      /* If OP0 is a partial mode, then we must truncate, even if it has
4375		 the same bitsize as OP1, because GCC's representation of partial
4376		 modes is opaque.  */
4377	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4378		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4379	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4380				      GET_MODE (op1));
4381	  else
4382	    /* We always sign-extend, regardless of the signedness of
4383	       the operand, because the operand is always unsigned
4384	       here even if the original C expression is signed.  */
4385	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4386				      GET_MODE (op1));
4387	}
4388      /* Fall through.  */
4389    case PLUS_EXPR:
4390      return simplify_gen_binary (PLUS, mode, op0, op1);
4391
4392    case MINUS_EXPR:
4393      return simplify_gen_binary (MINUS, mode, op0, op1);
4394
4395    case MULT_EXPR:
4396      return simplify_gen_binary (MULT, mode, op0, op1);
4397
4398    case RDIV_EXPR:
4399    case TRUNC_DIV_EXPR:
4400    case EXACT_DIV_EXPR:
4401      if (unsignedp)
4402	return simplify_gen_binary (UDIV, mode, op0, op1);
4403      else
4404	return simplify_gen_binary (DIV, mode, op0, op1);
4405
4406    case TRUNC_MOD_EXPR:
4407      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4408
4409    case FLOOR_DIV_EXPR:
4410      if (unsignedp)
4411	return simplify_gen_binary (UDIV, mode, op0, op1);
4412      else
4413	{
4414	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4415	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4416	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4417	  return simplify_gen_binary (PLUS, mode, div, adj);
4418	}
4419
4420    case FLOOR_MOD_EXPR:
4421      if (unsignedp)
4422	return simplify_gen_binary (UMOD, mode, op0, op1);
4423      else
4424	{
4425	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4426	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4427	  adj = simplify_gen_unary (NEG, mode,
4428				    simplify_gen_binary (MULT, mode, adj, op1),
4429				    mode);
4430	  return simplify_gen_binary (PLUS, mode, mod, adj);
4431	}
4432
4433    case CEIL_DIV_EXPR:
4434      if (unsignedp)
4435	{
4436	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4437	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4438	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4439	  return simplify_gen_binary (PLUS, mode, div, adj);
4440	}
4441      else
4442	{
4443	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4444	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4445	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4446	  return simplify_gen_binary (PLUS, mode, div, adj);
4447	}
4448
4449    case CEIL_MOD_EXPR:
4450      if (unsignedp)
4451	{
4452	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4453	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4454	  adj = simplify_gen_unary (NEG, mode,
4455				    simplify_gen_binary (MULT, mode, adj, op1),
4456				    mode);
4457	  return simplify_gen_binary (PLUS, mode, mod, adj);
4458	}
4459      else
4460	{
4461	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4462	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4463	  adj = simplify_gen_unary (NEG, mode,
4464				    simplify_gen_binary (MULT, mode, adj, op1),
4465				    mode);
4466	  return simplify_gen_binary (PLUS, mode, mod, adj);
4467	}
4468
4469    case ROUND_DIV_EXPR:
4470      if (unsignedp)
4471	{
4472	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4473	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4474	  rtx adj = round_udiv_adjust (mode, mod, op1);
4475	  return simplify_gen_binary (PLUS, mode, div, adj);
4476	}
4477      else
4478	{
4479	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4480	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4481	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4482	  return simplify_gen_binary (PLUS, mode, div, adj);
4483	}
4484
4485    case ROUND_MOD_EXPR:
4486      if (unsignedp)
4487	{
4488	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4489	  rtx adj = round_udiv_adjust (mode, mod, op1);
4490	  adj = simplify_gen_unary (NEG, mode,
4491				    simplify_gen_binary (MULT, mode, adj, op1),
4492				    mode);
4493	  return simplify_gen_binary (PLUS, mode, mod, adj);
4494	}
4495      else
4496	{
4497	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4498	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4499	  adj = simplify_gen_unary (NEG, mode,
4500				    simplify_gen_binary (MULT, mode, adj, op1),
4501				    mode);
4502	  return simplify_gen_binary (PLUS, mode, mod, adj);
4503	}
4504
4505    case LSHIFT_EXPR:
4506      return simplify_gen_binary (ASHIFT, mode, op0, op1);
4507
4508    case RSHIFT_EXPR:
4509      if (unsignedp)
4510	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4511      else
4512	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4513
4514    case LROTATE_EXPR:
4515      return simplify_gen_binary (ROTATE, mode, op0, op1);
4516
4517    case RROTATE_EXPR:
4518      return simplify_gen_binary (ROTATERT, mode, op0, op1);
4519
4520    case MIN_EXPR:
4521      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4522
4523    case MAX_EXPR:
4524      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4525
4526    case BIT_AND_EXPR:
4527    case TRUTH_AND_EXPR:
4528      return simplify_gen_binary (AND, mode, op0, op1);
4529
4530    case BIT_IOR_EXPR:
4531    case TRUTH_OR_EXPR:
4532      return simplify_gen_binary (IOR, mode, op0, op1);
4533
4534    case BIT_XOR_EXPR:
4535    case TRUTH_XOR_EXPR:
4536      return simplify_gen_binary (XOR, mode, op0, op1);
4537
4538    case TRUTH_ANDIF_EXPR:
4539      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4540
4541    case TRUTH_ORIF_EXPR:
4542      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4543
4544    case TRUTH_NOT_EXPR:
4545      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4546
4547    case LT_EXPR:
4548      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4549				      op0, op1);
4550
4551    case LE_EXPR:
4552      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4553				      op0, op1);
4554
4555    case GT_EXPR:
4556      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4557				      op0, op1);
4558
4559    case GE_EXPR:
4560      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4561				      op0, op1);
4562
4563    case EQ_EXPR:
4564      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4565
4566    case NE_EXPR:
4567      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4568
4569    case UNORDERED_EXPR:
4570      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4571
4572    case ORDERED_EXPR:
4573      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4574
4575    case UNLT_EXPR:
4576      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4577
4578    case UNLE_EXPR:
4579      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4580
4581    case UNGT_EXPR:
4582      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4583
4584    case UNGE_EXPR:
4585      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4586
4587    case UNEQ_EXPR:
4588      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4589
4590    case LTGT_EXPR:
4591      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4592
4593    case COND_EXPR:
4594      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4595
4596    case COMPLEX_EXPR:
4597      gcc_assert (COMPLEX_MODE_P (mode));
4598      if (GET_MODE (op0) == VOIDmode)
4599	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4600      if (GET_MODE (op1) == VOIDmode)
4601	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4602      return gen_rtx_CONCAT (mode, op0, op1);
4603
4604    case CONJ_EXPR:
4605      if (GET_CODE (op0) == CONCAT)
4606	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4607			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4608						   XEXP (op0, 1),
4609						   GET_MODE_INNER (mode)));
4610      else
4611	{
4612	  machine_mode imode = GET_MODE_INNER (mode);
4613	  rtx re, im;
4614
4615	  if (MEM_P (op0))
4616	    {
4617	      re = adjust_address_nv (op0, imode, 0);
4618	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4619	    }
4620	  else
4621	    {
4622	      machine_mode ifmode = int_mode_for_mode (mode);
4623	      machine_mode ihmode = int_mode_for_mode (imode);
4624	      rtx halfsize;
4625	      if (ifmode == BLKmode || ihmode == BLKmode)
4626		return NULL;
4627	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4628	      re = op0;
4629	      if (mode != ifmode)
4630		re = gen_rtx_SUBREG (ifmode, re, 0);
4631	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4632	      if (imode != ihmode)
4633		re = gen_rtx_SUBREG (imode, re, 0);
4634	      im = copy_rtx (op0);
4635	      if (mode != ifmode)
4636		im = gen_rtx_SUBREG (ifmode, im, 0);
4637	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4638	      if (imode != ihmode)
4639		im = gen_rtx_SUBREG (imode, im, 0);
4640	    }
4641	  im = gen_rtx_NEG (imode, im);
4642	  return gen_rtx_CONCAT (mode, re, im);
4643	}
4644
4645    case ADDR_EXPR:
4646      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4647      if (!op0 || !MEM_P (op0))
4648	{
4649	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4650	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4651	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4652	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4653		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4654	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4655
4656	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4657	    {
4658	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
4659	      tree decl
4660		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4661					   &bitoffset, &bitsize, &maxsize);
4662	      if ((TREE_CODE (decl) == VAR_DECL
4663		   || TREE_CODE (decl) == PARM_DECL
4664		   || TREE_CODE (decl) == RESULT_DECL)
4665		  && (!TREE_ADDRESSABLE (decl)
4666		      || target_for_debug_bind (decl))
4667		  && (bitoffset % BITS_PER_UNIT) == 0
4668		  && bitsize > 0
4669		  && bitsize == maxsize)
4670		{
4671		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4672		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4673		}
4674	    }
4675
4676	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4677	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4678		 == ADDR_EXPR)
4679	    {
4680	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4681						     0));
4682	      if (op0 != NULL
4683		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4684		      || (GET_CODE (op0) == PLUS
4685			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4686			  && CONST_INT_P (XEXP (op0, 1)))))
4687		{
4688		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4689							 1));
4690		  if (!op1 || !CONST_INT_P (op1))
4691		    return NULL;
4692
4693		  return plus_constant (mode, op0, INTVAL (op1));
4694		}
4695	    }
4696
4697	  return NULL;
4698	}
4699
4700      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4701      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4702
4703      return op0;
4704
4705    case VECTOR_CST:
4706      {
4707	unsigned i;
4708
4709	op0 = gen_rtx_CONCATN
4710	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4711
4712	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4713	  {
4714	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4715	    if (!op1)
4716	      return NULL;
4717	    XVECEXP (op0, 0, i) = op1;
4718	  }
4719
4720	return op0;
4721      }
4722
4723    case CONSTRUCTOR:
4724      if (TREE_CLOBBER_P (exp))
4725	return NULL;
4726      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4727	{
4728	  unsigned i;
4729	  tree val;
4730
4731	  op0 = gen_rtx_CONCATN
4732	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4733
4734	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4735	    {
4736	      op1 = expand_debug_expr (val);
4737	      if (!op1)
4738		return NULL;
4739	      XVECEXP (op0, 0, i) = op1;
4740	    }
4741
4742	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4743	    {
4744	      op1 = expand_debug_expr
4745		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4746
4747	      if (!op1)
4748		return NULL;
4749
4750	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4751		XVECEXP (op0, 0, i) = op1;
4752	    }
4753
4754	  return op0;
4755	}
4756      else
4757	goto flag_unsupported;
4758
4759    case CALL_EXPR:
4760      /* ??? Maybe handle some builtins?  */
4761      return NULL;
4762
4763    case SSA_NAME:
4764      {
4765	gimple g = get_gimple_for_ssa_name (exp);
4766	if (g)
4767	  {
4768	    tree t = NULL_TREE;
4769	    if (deep_ter_debug_map)
4770	      {
4771		tree *slot = deep_ter_debug_map->get (exp);
4772		if (slot)
4773		  t = *slot;
4774	      }
4775	    if (t == NULL_TREE)
4776	      t = gimple_assign_rhs_to_tree (g);
4777	    op0 = expand_debug_expr (t);
4778	    if (!op0)
4779	      return NULL;
4780	  }
4781	else
4782	  {
4783	    int part = var_to_partition (SA.map, exp);
4784
4785	    if (part == NO_PARTITION)
4786	      {
4787		/* If this is a reference to an incoming value of a parameter
4788		   that is never used in the code, or whose incoming value is
4789		   never used in the code, use the PARM_DECL's DECL_RTL if
4790		   set.  */
4791		if (SSA_NAME_IS_DEFAULT_DEF (exp)
4792		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4793		  {
4794		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4795		    if (op0)
4796		      goto adjust_mode;
4797		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4798		    if (op0)
4799		      goto adjust_mode;
4800		  }
4801		return NULL;
4802	      }
4803
4804	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4805
4806	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
4807	  }
4808	goto adjust_mode;
4809      }
4810
4811    case ERROR_MARK:
4812      return NULL;
4813
4814    /* Vector codes.  For most of these we don't have corresponding rtl codes.  */
4815    case REALIGN_LOAD_EXPR:
4816    case REDUC_MAX_EXPR:
4817    case REDUC_MIN_EXPR:
4818    case REDUC_PLUS_EXPR:
4819    case VEC_COND_EXPR:
4820    case VEC_PACK_FIX_TRUNC_EXPR:
4821    case VEC_PACK_SAT_EXPR:
4822    case VEC_PACK_TRUNC_EXPR:
4823    case VEC_UNPACK_FLOAT_HI_EXPR:
4824    case VEC_UNPACK_FLOAT_LO_EXPR:
4825    case VEC_UNPACK_HI_EXPR:
4826    case VEC_UNPACK_LO_EXPR:
4827    case VEC_WIDEN_MULT_HI_EXPR:
4828    case VEC_WIDEN_MULT_LO_EXPR:
4829    case VEC_WIDEN_MULT_EVEN_EXPR:
4830    case VEC_WIDEN_MULT_ODD_EXPR:
4831    case VEC_WIDEN_LSHIFT_HI_EXPR:
4832    case VEC_WIDEN_LSHIFT_LO_EXPR:
4833    case VEC_PERM_EXPR:
4834      return NULL;
4835
4836    /* Misc codes.  */
4837    case ADDR_SPACE_CONVERT_EXPR:
4838    case FIXED_CONVERT_EXPR:
4839    case OBJ_TYPE_REF:
4840    case WITH_SIZE_EXPR:
4841      return NULL;
4842
4843    case DOT_PROD_EXPR:
4844      if (SCALAR_INT_MODE_P (GET_MODE (op0))
4845	  && SCALAR_INT_MODE_P (mode))
4846	{
4847	  op0
4848	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4849									  0)))
4850				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4851				  inner_mode);
4852	  op1
4853	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4854									  1)))
4855				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4856				  inner_mode);
4857	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
4858	  return simplify_gen_binary (PLUS, mode, op0, op2);
4859	}
4860      return NULL;
4861
4862    case WIDEN_MULT_EXPR:
4863    case WIDEN_MULT_PLUS_EXPR:
4864    case WIDEN_MULT_MINUS_EXPR:
4865      if (SCALAR_INT_MODE_P (GET_MODE (op0))
4866	  && SCALAR_INT_MODE_P (mode))
4867	{
4868	  inner_mode = GET_MODE (op0);
4869	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4870	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4871	  else
4872	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4873	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4874	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4875	  else
4876	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4877	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
4878	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4879	    return op0;
4880	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4881	    return simplify_gen_binary (PLUS, mode, op0, op2);
4882	  else
4883	    return simplify_gen_binary (MINUS, mode, op2, op0);
4884	}
4885      return NULL;
4886
4887    case MULT_HIGHPART_EXPR:
4888      /* ??? Similar to the above.  */
4889      return NULL;
4890
4891    case WIDEN_SUM_EXPR:
4892    case WIDEN_LSHIFT_EXPR:
4893      if (SCALAR_INT_MODE_P (GET_MODE (op0))
4894	  && SCALAR_INT_MODE_P (mode))
4895	{
4896	  op0
4897	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4898									  0)))
4899				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4900				  inner_mode);
4901	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4902				      ? ASHIFT : PLUS, mode, op0, op1);
4903	}
4904      return NULL;
4905
4906    case FMA_EXPR:
4907      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4908
4909    default:
4910    flag_unsupported:
4911#ifdef ENABLE_CHECKING
4912      debug_tree (exp);
4913      gcc_unreachable ();
4914#else
4915      return NULL;
4916#endif
4917    }
4918}
4919
4920/* Return an RTX equivalent to the source bind value of the tree expression
4921   EXP.  */
4922
4923static rtx
4924expand_debug_source_expr (tree exp)
4925{
4926  rtx op0 = NULL_RTX;
4927  machine_mode mode = VOIDmode, inner_mode;
4928
4929  switch (TREE_CODE (exp))
4930    {
4931    case PARM_DECL:
4932      {
4933	mode = DECL_MODE (exp);
4934	op0 = expand_debug_parm_decl (exp);
4935	if (op0)
4936	   break;
4937	/* Check whether this is an argument that has been completely
4938	   optimized out.  */
4939	if (!DECL_RTL_SET_P (exp)
4940	    && !DECL_INCOMING_RTL (exp)
4941	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
4942	  {
4943	    tree aexp = DECL_ORIGIN (exp);
4944	    if (DECL_CONTEXT (aexp)
4945		== DECL_ABSTRACT_ORIGIN (current_function_decl))
4946	      {
4947		vec<tree, va_gc> **debug_args;
4948		unsigned int ix;
4949		tree ddecl;
4950		debug_args = decl_debug_args_lookup (current_function_decl);
4951		if (debug_args != NULL)
4952		  {
4953		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4954			 ix += 2)
4955		      if (ddecl == aexp)
4956			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4957		  }
4958	      }
4959	  }
4960	break;
4961      }
4962    default:
4963      break;
4964    }
4965
4966  if (op0 == NULL_RTX)
4967    return NULL_RTX;
4968
4969  inner_mode = GET_MODE (op0);
4970  if (mode == inner_mode)
4971    return op0;
4972
4973  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4974    {
4975      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4976	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4977      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4978	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4979      else
4980	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4981    }
4982  else if (FLOAT_MODE_P (mode))
4983    gcc_unreachable ();
4984  else if (FLOAT_MODE_P (inner_mode))
4985    {
4986      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4987	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4988      else
4989	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4990    }
4991  else if (CONSTANT_P (op0)
4992	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4993    op0 = simplify_gen_subreg (mode, op0, inner_mode,
4994			       subreg_lowpart_offset (mode, inner_mode));
4995  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4996    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4997  else
4998    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4999
5000  return op0;
5001}
5002
5003/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5004   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5005   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
5006
5007static void
5008avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5009{
5010  rtx exp = *exp_p;
5011
5012  if (exp == NULL_RTX)
5013    return;
5014
5015  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5016    return;
5017
5018  if (depth == 4)
5019    {
5020      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5021      rtx dval = make_debug_expr_from_rtl (exp);
5022
5023      /* Emit a debug bind insn before INSN.  */
5024      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5025				       DEBUG_EXPR_TREE_DECL (dval), exp,
5026				       VAR_INIT_STATUS_INITIALIZED);
5027
5028      emit_debug_insn_before (bind, insn);
5029      *exp_p = dval;
5030      return;
5031    }
5032
5033  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5034  int i, j;
5035  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5036    switch (*format_ptr++)
5037      {
5038      case 'e':
5039	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5040	break;
5041
5042      case 'E':
5043      case 'V':
5044	for (j = 0; j < XVECLEN (exp, i); j++)
5045	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5046	break;
5047
5048      default:
5049	break;
5050      }
5051}
5052
5053/* Expand the _LOCs in debug insns.  We run this after expanding all
5054   regular insns, so that any variables referenced in the function
5055   will have their DECL_RTLs set.  */
5056
5057static void
5058expand_debug_locations (void)
5059{
5060  rtx_insn *insn;
5061  rtx_insn *last = get_last_insn ();
5062  int save_strict_alias = flag_strict_aliasing;
5063
5064  /* New alias sets while setting up memory attributes cause
5065     -fcompare-debug failures, even though they don't bring about any
5066     codegen changes.  */
5067  flag_strict_aliasing = 0;
5068
5069  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5070    if (DEBUG_INSN_P (insn))
5071      {
5072	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5073	rtx val;
5074	rtx_insn *prev_insn, *insn2;
5075	machine_mode mode;
5076
5077	if (value == NULL_TREE)
5078	  val = NULL_RTX;
5079	else
5080	  {
5081	    if (INSN_VAR_LOCATION_STATUS (insn)
5082		== VAR_INIT_STATUS_UNINITIALIZED)
5083	      val = expand_debug_source_expr (value);
5084	    /* The avoid_deep_ter_for_debug function inserts
5085	       debug bind stmts after an SSA_NAME definition, with the
5086	       SSA_NAME as the whole bind location.  Temporarily disable
5087	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5088	       being defined in this DEBUG_INSN.  */
5089	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5090	      {
5091		tree *slot = deep_ter_debug_map->get (value);
5092		if (slot)
5093		  {
5094		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5095		      *slot = NULL_TREE;
5096		    else
5097		      slot = NULL;
5098		  }
5099		val = expand_debug_expr (value);
5100		if (slot)
5101		  *slot = INSN_VAR_LOCATION_DECL (insn);
5102	      }
5103	    else
5104	      val = expand_debug_expr (value);
5105	    gcc_assert (last == get_last_insn ());
5106	  }
5107
5108	if (!val)
5109	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5110	else
5111	  {
5112	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5113
5114	    gcc_assert (mode == GET_MODE (val)
5115			|| (GET_MODE (val) == VOIDmode
5116			    && (CONST_SCALAR_INT_P (val)
5117				|| GET_CODE (val) == CONST_FIXED
5118				|| GET_CODE (val) == LABEL_REF)));
5119	  }
5120
5121	INSN_VAR_LOCATION_LOC (insn) = val;
5122	prev_insn = PREV_INSN (insn);
5123	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5124	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5125      }
5126
5127  flag_strict_aliasing = save_strict_alias;
5128}
5129
5130/* Swap the operands of commutative operations so that the more
5131   expensive one is expanded first.  */
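/* For example, for x = a + b where the statement chain computing b is
   costlier (per the lattice below) than the one computing a, the operands
   are swapped to b + a so that the more expensive chain is expanded
   first.  */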
5132
5133static void
5134reorder_operands (basic_block bb)
5135{
5136  unsigned int *lattice;  /* Holds the cost of each statement.  */
5137  unsigned int i = 0, n = 0;
5138  gimple_stmt_iterator gsi;
5139  gimple_seq stmts;
5140  gimple stmt;
5141  bool swap;
5142  tree op0, op1;
5143  ssa_op_iter iter;
5144  use_operand_p use_p;
5145  gimple def0, def1;
5146
5147  /* Compute cost of each statement using estimate_num_insns.  */
5148  stmts = bb_seq (bb);
5149  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5150    {
5151      stmt = gsi_stmt (gsi);
5152      if (!is_gimple_debug (stmt))
5153        gimple_set_uid (stmt, n++);
5154    }
5155  lattice = XNEWVEC (unsigned int, n);
5156  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5157    {
5158      unsigned cost;
5159      stmt = gsi_stmt (gsi);
5160      if (is_gimple_debug (stmt))
5161	continue;
5162      cost = estimate_num_insns (stmt, &eni_size_weights);
5163      lattice[i] = cost;
5164      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5165	{
5166	  tree use = USE_FROM_PTR (use_p);
5167	  gimple def_stmt;
5168	  if (TREE_CODE (use) != SSA_NAME)
5169	    continue;
5170	  def_stmt = get_gimple_for_ssa_name (use);
5171	  if (!def_stmt)
5172	    continue;
5173	  lattice[i] += lattice[gimple_uid (def_stmt)];
5174	}
5175      i++;
5176      if (!is_gimple_assign (stmt)
5177	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5178	continue;
5179      op0 = gimple_op (stmt, 1);
5180      op1 = gimple_op (stmt, 2);
5181      if (TREE_CODE (op0) != SSA_NAME
5182	  || TREE_CODE (op1) != SSA_NAME)
5183	continue;
5184      /* Swap operands if the second one is more expensive.  */
5185      def0 = get_gimple_for_ssa_name (op0);
5186      def1 = get_gimple_for_ssa_name (op1);
5187      if (!def1)
5188	continue;
5189      swap = false;
5190      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5191	swap = true;
5192      if (swap)
5193	{
5194	  if (dump_file && (dump_flags & TDF_DETAILS))
5195	    {
5196	      fprintf (dump_file, "Swap operands in stmt:\n");
5197	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5198	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5199		       def0 ? lattice[gimple_uid (def0)] : 0,
5200		       lattice[gimple_uid (def1)]);
5201	    }
5202	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5203			     gimple_assign_rhs2_ptr (stmt));
5204	}
5205    }
5206  XDELETE (lattice);
5207}
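
/* A standalone sketch (not GCC API) of the costing scheme used by
   reorder_operands above: each statement's cost is its own size
   estimate plus the accumulated cost of the single-use (TERed)
   statements feeding it, and the operands of a commutative operation
   are swapped when the second one is costlier, so that the expensive
   operand is expanded first.  The types and names below are made up
   for illustration only.  */
#if 0
#include <stdio.h>

struct cost_stmt
{
  unsigned own_cost;		/* analogous to estimate_num_insns */
  struct cost_stmt *op0_def;	/* defining stmt of the first operand, or NULL */
  struct cost_stmt *op1_def;	/* defining stmt of the second operand, or NULL */
  unsigned lattice;		/* accumulated cost, filled in below */
  int commutative;
};

static unsigned
accumulated_cost (struct cost_stmt *s)
{
  return s ? s->lattice : 0;
}

/* STMTS is assumed to be in dominance order, so every feeding statement
   already has its lattice value when we reach its user.  */

static void
cost_and_swap (struct cost_stmt **stmts, unsigned n)
{
  unsigned i;

  for (i = 0; i < n; i++)
    {
      struct cost_stmt *s = stmts[i];
      s->lattice = s->own_cost
		   + accumulated_cost (s->op0_def)
		   + accumulated_cost (s->op1_def);
      if (s->commutative
	  && s->op1_def
	  && (!s->op0_def
	      || accumulated_cost (s->op1_def) > accumulated_cost (s->op0_def)))
	{
	  /* Put the expensive operand first, as swap_ssa_operands does.  */
	  struct cost_stmt *tmp = s->op0_def;
	  s->op0_def = s->op1_def;
	  s->op1_def = tmp;
	  printf ("swapped operands of stmt %u\n", i);
	}
    }
}
#endif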
5208
5209/* Expand basic block BB from GIMPLE trees to RTL.  */
5210
5211static basic_block
5212expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5213{
5214  gimple_stmt_iterator gsi;
5215  gimple_seq stmts;
5216  gimple stmt = NULL;
5217  rtx_note *note;
5218  rtx_insn *last;
5219  edge e;
5220  edge_iterator ei;
5221
5222  if (dump_file)
5223    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5224	     bb->index);
5225
5226  /* Note that since we are now transitioning from GIMPLE to RTL, we
5227     cannot use the gsi_*_bb() routines because they expect the basic
5228     block to be in GIMPLE, instead of RTL.  Therefore, we need to
5229     access the BB sequence directly.  */
5230  if (optimize)
5231    reorder_operands (bb);
5232  stmts = bb_seq (bb);
5233  bb->il.gimple.seq = NULL;
5234  bb->il.gimple.phi_nodes = NULL;
5235  rtl_profile_for_bb (bb);
5236  init_rtl_bb_info (bb);
5237  bb->flags |= BB_RTL;
5238
  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
5241  gsi = gsi_last (stmts);
5242  if (!gsi_end_p (gsi)
5243      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5244    {
5245      greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5246
5247      gcc_assert (single_succ_p (bb));
5248      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5249
5250      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5251	  && !gimple_return_retval (ret_stmt))
5252	{
5253	  gsi_remove (&gsi, false);
5254	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5255	}
5256    }
5257
5258  gsi = gsi_start (stmts);
5259  if (!gsi_end_p (gsi))
5260    {
5261      stmt = gsi_stmt (gsi);
5262      if (gimple_code (stmt) != GIMPLE_LABEL)
5263	stmt = NULL;
5264    }
5265
5266  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5267
5268  if (stmt || elt)
5269    {
5270      last = get_last_insn ();
5271
5272      if (stmt)
5273	{
5274	  expand_gimple_stmt (stmt);
5275	  gsi_next (&gsi);
5276	}
5277
5278      if (elt)
5279	emit_label (*elt);
5280
      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
5283      BB_HEAD (bb) = NEXT_INSN (last);
5284      if (NOTE_P (BB_HEAD (bb)))
5285	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5286      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5287
5288      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5289    }
5290  else
5291    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5292
5293  NOTE_BASIC_BLOCK (note) = bb;
5294
5295  for (; !gsi_end_p (gsi); gsi_next (&gsi))
5296    {
5297      basic_block new_bb;
5298
5299      stmt = gsi_stmt (gsi);
5300
5301      /* If this statement is a non-debug one, and we generate debug
5302	 insns, then this one might be the last real use of a TERed
5303	 SSA_NAME, but where there are still some debug uses further
5304	 down.  Expanding the current SSA name in such further debug
5305	 uses by their RHS might lead to wrong debug info, as coalescing
5306	 might make the operands of such RHS be placed into the same
5307	 pseudo as something else.  Like so:
5308	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5309	   use(a_1);
5310	   a_2 = ...
5311           #DEBUG ... => a_1
5312	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
         If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5314	 the write to a_2 would actually have clobbered the place which
5315	 formerly held a_0.
5316
5317	 So, instead of that, we recognize the situation, and generate
5318	 debug temporaries at the last real use of TERed SSA names:
5319	   a_1 = a_0 + 1;
5320           #DEBUG #D1 => a_1
5321	   use(a_1);
5322	   a_2 = ...
5323           #DEBUG ... => #D1
5324	 */
5325      if (MAY_HAVE_DEBUG_INSNS
5326	  && SA.values
5327	  && !is_gimple_debug (stmt))
5328	{
5329	  ssa_op_iter iter;
5330	  tree op;
5331	  gimple def;
5332
5333	  location_t sloc = curr_insn_location ();
5334
5335	  /* Look for SSA names that have their last use here (TERed
5336	     names always have only one real use).  */
5337	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5338	    if ((def = get_gimple_for_ssa_name (op)))
5339	      {
5340		imm_use_iterator imm_iter;
5341		use_operand_p use_p;
5342		bool have_debug_uses = false;
5343
5344		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5345		  {
5346		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5347		      {
5348			have_debug_uses = true;
5349			break;
5350		      }
5351		  }
5352
5353		if (have_debug_uses)
5354		  {
5355		    /* OP is a TERed SSA name, with DEF its defining
5356		       statement, and where OP is used in further debug
5357		       instructions.  Generate a debug temporary, and
5358		       replace all uses of OP in debug insns with that
5359		       temporary.  */
5360		    gimple debugstmt;
5361		    tree value = gimple_assign_rhs_to_tree (def);
5362		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5363		    rtx val;
5364		    machine_mode mode;
5365
5366		    set_curr_insn_location (gimple_location (def));
5367
5368		    DECL_ARTIFICIAL (vexpr) = 1;
5369		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5370		    if (DECL_P (value))
5371		      mode = DECL_MODE (value);
5372		    else
5373		      mode = TYPE_MODE (TREE_TYPE (value));
5374		    DECL_MODE (vexpr) = mode;
5375
5376		    val = gen_rtx_VAR_LOCATION
5377			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5378
5379		    emit_debug_insn (val);
5380
5381		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5382		      {
5383			if (!gimple_debug_bind_p (debugstmt))
5384			  continue;
5385
5386			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5387			  SET_USE (use_p, vexpr);
5388
5389			update_stmt (debugstmt);
5390		      }
5391		  }
5392	      }
5393	  set_curr_insn_location (sloc);
5394	}
5395
5396      currently_expanding_gimple_stmt = stmt;
5397
5398      /* Expand this statement, then evaluate the resulting RTL and
5399	 fixup the CFG accordingly.  */
5400      if (gimple_code (stmt) == GIMPLE_COND)
5401	{
5402	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5403	  if (new_bb)
5404	    return new_bb;
5405	}
5406      else if (gimple_debug_bind_p (stmt))
5407	{
5408	  location_t sloc = curr_insn_location ();
5409	  gimple_stmt_iterator nsi = gsi;
5410
5411	  for (;;)
5412	    {
5413	      tree var = gimple_debug_bind_get_var (stmt);
5414	      tree value;
5415	      rtx val;
5416	      machine_mode mode;
5417
5418	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5419		  && TREE_CODE (var) != LABEL_DECL
5420		  && !target_for_debug_bind (var))
5421		goto delink_debug_stmt;
5422
5423	      if (gimple_debug_bind_has_value_p (stmt))
5424		value = gimple_debug_bind_get_value (stmt);
5425	      else
5426		value = NULL_TREE;
5427
5428	      last = get_last_insn ();
5429
5430	      set_curr_insn_location (gimple_location (stmt));
5431
5432	      if (DECL_P (var))
5433		mode = DECL_MODE (var);
5434	      else
5435		mode = TYPE_MODE (TREE_TYPE (var));
5436
5437	      val = gen_rtx_VAR_LOCATION
5438		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5439
5440	      emit_debug_insn (val);
5441
5442	      if (dump_file && (dump_flags & TDF_DETAILS))
5443		{
5444		  /* We can't dump the insn with a TREE where an RTX
5445		     is expected.  */
5446		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5447		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5448		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5449		}
5450
5451	    delink_debug_stmt:
5452	      /* In order not to generate too many debug temporaries,
5453	         we delink all uses of debug statements we already expanded.
5454		 Therefore debug statements between definition and real
5455		 use of TERed SSA names will continue to use the SSA name,
5456		 and not be replaced with debug temps.  */
5457	      delink_stmt_imm_use (stmt);
5458
5459	      gsi = nsi;
5460	      gsi_next (&nsi);
5461	      if (gsi_end_p (nsi))
5462		break;
5463	      stmt = gsi_stmt (nsi);
5464	      if (!gimple_debug_bind_p (stmt))
5465		break;
5466	    }
5467
5468	  set_curr_insn_location (sloc);
5469	}
5470      else if (gimple_debug_source_bind_p (stmt))
5471	{
5472	  location_t sloc = curr_insn_location ();
5473	  tree var = gimple_debug_source_bind_get_var (stmt);
5474	  tree value = gimple_debug_source_bind_get_value (stmt);
5475	  rtx val;
5476	  machine_mode mode;
5477
5478	  last = get_last_insn ();
5479
5480	  set_curr_insn_location (gimple_location (stmt));
5481
5482	  mode = DECL_MODE (var);
5483
5484	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5485				      VAR_INIT_STATUS_UNINITIALIZED);
5486
5487	  emit_debug_insn (val);
5488
5489	  if (dump_file && (dump_flags & TDF_DETAILS))
5490	    {
5491	      /* We can't dump the insn with a TREE where an RTX
5492		 is expected.  */
5493	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5494	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5495	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5496	    }
5497
5498	  set_curr_insn_location (sloc);
5499	}
5500      else
5501	{
5502	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5503	  if (call_stmt
5504	      && gimple_call_tail_p (call_stmt)
5505	      && disable_tail_calls)
5506	    gimple_call_set_tail (call_stmt, false);
5507
5508	  if (call_stmt && gimple_call_tail_p (call_stmt))
5509	    {
5510	      bool can_fallthru;
5511	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5512	      if (new_bb)
5513		{
5514		  if (can_fallthru)
5515		    bb = new_bb;
5516		  else
5517		    return new_bb;
5518		}
5519	    }
5520	  else
5521	    {
5522	      def_operand_p def_p;
5523	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5524
5525	      if (def_p != NULL)
5526		{
5527		  /* Ignore this stmt if it is in the list of
5528		     replaceable expressions.  */
5529		  if (SA.values
5530		      && bitmap_bit_p (SA.values,
5531				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5532		    continue;
5533		}
5534	      last = expand_gimple_stmt (stmt);
5535	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5536	    }
5537	}
5538    }
5539
5540  currently_expanding_gimple_stmt = NULL;
5541
5542  /* Expand implicit goto and convert goto_locus.  */
5543  FOR_EACH_EDGE (e, ei, bb->succs)
5544    {
5545      if (e->goto_locus != UNKNOWN_LOCATION)
5546	set_curr_insn_location (e->goto_locus);
5547      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5548	{
5549	  emit_jump (label_rtx_for_bb (e->dest));
5550	  e->flags &= ~EDGE_FALLTHRU;
5551	}
5552    }
5553
  /* Expanded RTL can create a jump in the last instruction of the block.
     This jump might later be assumed to be a jump to the successor and
     break edge insertion.  We need to insert a dummy move to prevent
     this.  PR41440.  */
5557  if (single_succ_p (bb)
5558      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5559      && (last = get_last_insn ())
5560      && JUMP_P (last))
5561    {
5562      rtx dummy = gen_reg_rtx (SImode);
5563      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5564    }
5565
5566  do_pending_stack_adjust ();
5567
5568  /* Find the block tail.  The last insn in the block is the insn
5569     before a barrier and/or table jump insn.  */
5570  last = get_last_insn ();
5571  if (BARRIER_P (last))
5572    last = PREV_INSN (last);
5573  if (JUMP_TABLE_DATA_P (last))
5574    last = PREV_INSN (PREV_INSN (last));
5575  BB_END (bb) = last;
5576
5577  update_bb_for_insn (bb);
5578
5579  return bb;
5580}
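
/* A standalone sketch (not GCC API) of the implicit-goto expansion done
   at the end of expand_gimple_basic_block above: once a block's body has
   been emitted, a fall-through edge whose destination is not the next
   block in layout order has to be turned into an explicit jump.  The
   names below (layout_block, emit_jump_to, ...) are hypothetical.  */
#if 0
#include <stdio.h>

struct layout_block
{
  int index;
  struct layout_block *next;		/* next block in layout order */
  struct layout_block *fallthru_dest;	/* NULL if the block ends in a jump */
};

static void
emit_jump_to (struct layout_block *dest)
{
  printf ("jump to block %d\n", dest->index);
}

static void
fixup_fallthru (struct layout_block *bb)
{
  if (bb->fallthru_dest && bb->fallthru_dest != bb->next)
    {
      /* The successor is not adjacent, so the implicit goto must be
	 materialized and the edge stops being a fall-through edge.  */
      emit_jump_to (bb->fallthru_dest);
      bb->fallthru_dest = NULL;
    }
}
#endif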
5581
5582
5583/* Create a basic block for initialization code.  */
5584
5585static basic_block
5586construct_init_block (void)
5587{
5588  basic_block init_block, first_block;
5589  edge e = NULL;
5590  int flags;
5591
5592  /* Multiple entry points not supported yet.  */
5593  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5594  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5595  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5596  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5597  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5598
5599  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5600
  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
5603  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5604    {
5605      tree label = gimple_block_label (e->dest);
5606
5607      emit_jump (label_rtx (label));
5608      flags = 0;
5609    }
5610  else
5611    flags = EDGE_FALLTHRU;
5612
5613  init_block = create_basic_block (NEXT_INSN (get_insns ()),
5614				   get_last_insn (),
5615				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5616  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5617  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5618  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5619  if (e)
5620    {
5621      first_block = e->dest;
5622      redirect_edge_succ (e, init_block);
5623      e = make_edge (init_block, first_block, flags);
5624    }
5625  else
5626    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5627  e->probability = REG_BR_PROB_BASE;
5628  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5629
5630  update_bb_for_insn (init_block);
5631  return init_block;
5632}
5633
5634/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5635   found in the block tree.  */
5636
5637static void
5638set_block_levels (tree block, int level)
5639{
5640  while (block)
5641    {
5642      BLOCK_NUMBER (block) = level;
5643      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5644      block = BLOCK_CHAIN (block);
5645    }
5646}
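
/* A standalone sketch (not the GCC tree API) of the numbering done by
   set_block_levels above, together with a tiny usage example.  Lexical
   blocks form a tree whose first child hangs off one pointer and whose
   siblings are chained off another; each block receives its depth in
   that tree.  The struct and function names below are hypothetical.  */
#if 0
#include <stdio.h>

struct lexblock
{
  int number;			/* depth, filled in by the walk */
  struct lexblock *subblocks;	/* first child */
  struct lexblock *chain;	/* next sibling */
};

static void
number_blocks (struct lexblock *block, int level)
{
  while (block)
    {
      block->number = level;
      number_blocks (block->subblocks, level + 1);	/* children: one deeper */
      block = block->chain;				/* siblings: same level */
    }
}

int
main (void)
{
  struct lexblock inner2 = { 0, 0, 0 };
  struct lexblock inner1 = { 0, 0, &inner2 };	/* sibling of inner2 */
  struct lexblock outer = { 0, &inner1, 0 };	/* parent of both */

  number_blocks (&outer, 0);
  printf ("%d %d %d\n", outer.number, inner1.number, inner2.number);
  /* Prints "0 1 1": siblings share a level, children are one deeper.  */
  return 0;
}
#endif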
5647
/* Create a basic block containing the code emitted for the function
   exit.  */
5649
5650static void
5651construct_exit_block (void)
5652{
5653  rtx_insn *head = get_last_insn ();
5654  rtx_insn *end;
5655  basic_block exit_block;
5656  edge e, e2;
5657  unsigned ix;
5658  edge_iterator ei;
5659  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5660  rtx_insn *orig_end = BB_END (prev_bb);
5661
5662  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5663
5664  /* Make sure the locus is set to the end of the function, so that
5665     epilogue line numbers and warnings are set properly.  */
5666  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5667    input_location = cfun->function_end_locus;
5668
5669  /* Generate rtl for function exit.  */
5670  expand_function_end ();
5671
5672  end = get_last_insn ();
5673  if (head == end)
5674    return;
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
5677  BB_END (prev_bb) = orig_end;
5678  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5679    head = NEXT_INSN (head);
5680  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5681     bb frequency counting will be confused.  Any instructions before that
5682     label are emitted for the case where PREV_BB falls through into the
5683     exit block, so append those instructions to prev_bb in that case.  */
5684  if (NEXT_INSN (head) != return_label)
5685    {
5686      while (NEXT_INSN (head) != return_label)
5687	{
5688	  if (!NOTE_P (NEXT_INSN (head)))
5689	    BB_END (prev_bb) = NEXT_INSN (head);
5690	  head = NEXT_INSN (head);
5691	}
5692    }
5693  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5694  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5695  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5696  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5697
5698  ix = 0;
5699  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5700    {
5701      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5702      if (!(e->flags & EDGE_ABNORMAL))
5703	redirect_edge_succ (e, exit_block);
5704      else
5705	ix++;
5706    }
5707
5708  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5709  e->probability = REG_BR_PROB_BASE;
5710  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5711  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5712    if (e2 != e)
5713      {
5714	e->count -= e2->count;
5715	exit_block->count -= e2->count;
5716	exit_block->frequency -= EDGE_FREQUENCY (e2);
5717      }
5718  if (e->count < 0)
5719    e->count = 0;
5720  if (exit_block->count < 0)
5721    exit_block->count = 0;
5722  if (exit_block->frequency < 0)
5723    exit_block->frequency = 0;
5724  update_bb_for_insn (exit_block);
5725}
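
/* A standalone sketch (not the GCC profile API) of how the exit block's
   fall-through edge count is derived above: start from the exit block's
   total count, subtract the counts of all other incoming edges, and
   clamp at zero so profile rounding errors never yield negative counts.
   The function name and types below are hypothetical.  */
#if 0
static long
fallthru_edge_count (long exit_total, const long *other_edge_counts, int n)
{
  long count = exit_total;
  int i;

  for (i = 0; i < n; i++)
    count -= other_edge_counts[i];
  return count < 0 ? 0 : count;	/* mirrors the "if (e->count < 0)" clamp */
}
#endif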
5726
5727/* Helper function for discover_nonconstant_array_refs.
5728   Look for ARRAY_REF nodes with non-constant indexes and mark them
5729   addressable.  */
5730
5731static tree
5732discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5733				   void *data ATTRIBUTE_UNUSED)
5734{
5735  tree t = *tp;
5736
5737  if (IS_TYPE_OR_DECL_P (t))
5738    *walk_subtrees = 0;
5739  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5740    {
5741      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5742	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5743	      && (!TREE_OPERAND (t, 2)
5744		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5745	     || (TREE_CODE (t) == COMPONENT_REF
5746		 && (!TREE_OPERAND (t,2)
5747		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5748	     || TREE_CODE (t) == BIT_FIELD_REF
5749	     || TREE_CODE (t) == REALPART_EXPR
5750	     || TREE_CODE (t) == IMAGPART_EXPR
5751	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
5752	     || CONVERT_EXPR_P (t))
5753	t = TREE_OPERAND (t, 0);
5754
5755      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5756	{
5757	  t = get_base_address (t);
5758	  if (t && DECL_P (t)
5759              && DECL_MODE (t) != BLKmode)
5760	    TREE_ADDRESSABLE (t) = 1;
5761	}
5762
5763      *walk_subtrees = 0;
5764    }
5765
5766  return NULL_TREE;
5767}
5768
5769/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
5771   expressions and mark variables as addressable to avoid this
5772   scenario.  */
5773
5774static void
5775discover_nonconstant_array_refs (void)
5776{
5777  basic_block bb;
5778  gimple_stmt_iterator gsi;
5779
5780  FOR_EACH_BB_FN (bb, cfun)
5781    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5782      {
5783	gimple stmt = gsi_stmt (gsi);
5784	if (!is_gimple_debug (stmt))
5785	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5786      }
5787}
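
/* A hypothetical source-level example of what the walk above is looking
   for.  On typical targets the aggregate "v" is small enough to be kept
   in a single register (its mode is not BLKmode), yet the variable
   index "i" requires taking an element address, so the walk marks "v"
   addressable and it gets a stack slot instead.  */
#if 0
int
pick (int i)
{
  struct { int a[2]; } v = { { 1, 2 } };
  return v.a[i];	/* ARRAY_REF with a non-constant index */
}
#endif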
5788
5789/* This function sets crtl->args.internal_arg_pointer to a virtual
5790   register if DRAP is needed.  Local register allocator will replace
5791   virtual_incoming_args_rtx with the virtual register.  */
5792
5793static void
5794expand_stack_alignment (void)
5795{
5796  rtx drap_rtx;
5797  unsigned int preferred_stack_boundary;
5798
5799  if (! SUPPORTS_STACK_ALIGNMENT)
5800    return;
5801
5802  if (cfun->calls_alloca
5803      || cfun->has_nonlocal_label
5804      || crtl->has_nonlocal_goto)
5805    crtl->need_drap = true;
5806
5807  /* Call update_stack_boundary here again to update incoming stack
5808     boundary.  It may set incoming stack alignment to a different
5809     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
5810     use the minimum incoming stack alignment to check if it is OK
5811     to perform sibcall optimization since sibcall optimization will
5812     only align the outgoing stack to incoming stack boundary.  */
5813  if (targetm.calls.update_stack_boundary)
5814    targetm.calls.update_stack_boundary ();
5815
5816  /* The incoming stack frame has to be aligned at least at
5817     parm_stack_boundary.  */
5818  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5819
5820  /* Update crtl->stack_alignment_estimated and use it later to align
5821     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
5822     exceptions since callgraph doesn't collect incoming stack alignment
5823     in this case.  */
5824  if (cfun->can_throw_non_call_exceptions
5825      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5826    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5827  else
5828    preferred_stack_boundary = crtl->preferred_stack_boundary;
5829  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5830    crtl->stack_alignment_estimated = preferred_stack_boundary;
5831  if (preferred_stack_boundary > crtl->stack_alignment_needed)
5832    crtl->stack_alignment_needed = preferred_stack_boundary;
5833
5834  gcc_assert (crtl->stack_alignment_needed
5835	      <= crtl->stack_alignment_estimated);
5836
5837  crtl->stack_realign_needed
5838    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5839  crtl->stack_realign_tried = crtl->stack_realign_needed;
5840
5841  crtl->stack_realign_processed = true;
5842
5843  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5844     alignment.  */
5845  gcc_assert (targetm.calls.get_drap_rtx != NULL);
5846  drap_rtx = targetm.calls.get_drap_rtx ();
5847
5848  /* stack_realign_drap and drap_rtx must match.  */
5849  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5850
5851  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
5852  if (NULL != drap_rtx)
5853    {
5854      crtl->args.internal_arg_pointer = drap_rtx;
5855
5856      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5857         needed. */
5858      fixup_tail_calls ();
5859    }
5860}
5861
5862
5863static void
5864expand_main_function (void)
5865{
5866#if (defined(INVOKE__main)				\
5867     || (!defined(HAS_INIT_SECTION)			\
5868	 && !defined(INIT_SECTION_ASM_OP)		\
5869	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5870  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5871#endif
5872}
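
/* What the call above amounts to, sketched at the source level (an
   illustration, not what GCC literally emits): on targets without a
   working .init/.init_array mechanism, main effectively begins by
   calling __main, which libgcc uses to run the static constructors
   before any user code executes.  */
#if 0
extern void __main (void);	/* provided by libgcc on such targets */

int
main (void)
{
  __main ();			/* run static constructors first */
  /* ... user code would follow here ... */
  return 0;
}
#endif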
5873
5874
5875/* Expand code to initialize the stack_protect_guard.  This is invoked at
5876   the beginning of a function to be protected.  */
5877
5878#ifndef HAVE_stack_protect_set
5879# define HAVE_stack_protect_set		0
5880# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
5881#endif
5882
5883static void
5884stack_protect_prologue (void)
5885{
5886  tree guard_decl = targetm.stack_protect_guard ();
5887  rtx x, y;
5888
5889  x = expand_normal (crtl->stack_protect_guard);
5890  y = expand_normal (guard_decl);
5891
5892  /* Allow the target to copy from Y to X without leaking Y into a
5893     register.  */
5894  if (HAVE_stack_protect_set)
5895    {
5896      rtx insn = gen_stack_protect_set (x, y);
5897      if (insn)
5898	{
5899	  emit_insn (insn);
5900	  return;
5901	}
5902    }
5903
5904  /* Otherwise do a straight move.  */
5905  emit_move_insn (x, y);
5906}
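
/* A source-level sketch (not the real expansion) of what the prologue
   above amounts to: copy the global guard into a frame-local slot; the
   matching epilogue check, emitted elsewhere, compares that slot with
   the guard again before returning.  The real stack_protect_set pattern
   additionally avoids leaking the guard value through a scratch
   register, which plain C cannot express.  All names below are
   hypothetical.  */
#if 0
extern unsigned long __guard_value;	/* stand-in for the guard decl */

void
protected_function (void)
{
  volatile unsigned long canary = __guard_value;	/* prologue copy */

  /* ... function body ... */

  if (canary != __guard_value)		/* epilogue check (sketch) */
    __builtin_trap ();
}
#endif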
5907
5908/* Translate the intermediate representation contained in the CFG
5909   from GIMPLE trees to RTL.
5910
5911   We do conversion per basic block and preserve/update the tree CFG.
5912   This implies we have to do some magic as the CFG can simultaneously
5913   consist of basic blocks containing RTL and GIMPLE trees.  This can
5914   confuse the CFG hooks, so be careful to not manipulate CFG during
5915   the expansion.  */
5916
5917namespace {
5918
5919const pass_data pass_data_expand =
5920{
5921  RTL_PASS, /* type */
5922  "expand", /* name */
5923  OPTGROUP_NONE, /* optinfo_flags */
5924  TV_EXPAND, /* tv_id */
5925  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5926    | PROP_gimple_lcx
5927    | PROP_gimple_lvec ), /* properties_required */
5928  PROP_rtl, /* properties_provided */
5929  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5930  0, /* todo_flags_start */
5931  0, /* todo_flags_finish */
5932};
5933
5934class pass_expand : public rtl_opt_pass
5935{
5936public:
5937  pass_expand (gcc::context *ctxt)
5938    : rtl_opt_pass (pass_data_expand, ctxt)
5939  {}
5940
5941  /* opt_pass methods: */
5942  virtual unsigned int execute (function *);
5943
5944}; // class pass_expand
5945
5946unsigned int
5947pass_expand::execute (function *fun)
5948{
5949  basic_block bb, init_block;
5950  sbitmap blocks;
5951  edge_iterator ei;
5952  edge e;
5953  rtx_insn *var_seq, *var_ret_seq;
5954  unsigned i;
5955
5956  timevar_push (TV_OUT_OF_SSA);
5957  rewrite_out_of_ssa (&SA);
5958  timevar_pop (TV_OUT_OF_SSA);
5959  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5960
5961  if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5962    {
5963      gimple_stmt_iterator gsi;
5964      FOR_EACH_BB_FN (bb, cfun)
5965	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5966	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
5967	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5968    }
5969
5970  /* Make sure all values used by the optimization passes have sane
5971     defaults.  */
5972  reg_renumber = 0;
5973
5974  /* Some backends want to know that we are expanding to RTL.  */
5975  currently_expanding_to_rtl = 1;
5976  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
5977  free_dominance_info (CDI_DOMINATORS);
5978
5979  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5980
5981  if (chkp_function_instrumented_p (current_function_decl))
5982    chkp_reset_rtl_bounds ();
5983
5984  insn_locations_init ();
5985  if (!DECL_IS_BUILTIN (current_function_decl))
5986    {
5987      /* Eventually, all FEs should explicitly set function_start_locus.  */
5988      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5989	set_curr_insn_location
5990	  (DECL_SOURCE_LOCATION (current_function_decl));
5991      else
5992	set_curr_insn_location (fun->function_start_locus);
5993    }
5994  else
5995    set_curr_insn_location (UNKNOWN_LOCATION);
5996  prologue_location = curr_insn_location ();
5997
5998#ifdef INSN_SCHEDULING
5999  init_sched_attrs ();
6000#endif
6001
6002  /* Make sure first insn is a note even if we don't want linenums.
6003     This makes sure the first insn will never be deleted.
6004     Also, final expects a note to appear there.  */
6005  emit_note (NOTE_INSN_DELETED);
6006
6007  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6008  discover_nonconstant_array_refs ();
6009
6010  targetm.expand_to_rtl_hook ();
6011  crtl->stack_alignment_needed = STACK_BOUNDARY;
6012  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
6013  crtl->stack_alignment_estimated = 0;
6014  crtl->preferred_stack_boundary = STACK_BOUNDARY;
6015  fun->cfg->max_jumptable_ents = 0;
6016
  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
6019  resolve_unique_section (current_function_decl, 0, flag_function_sections);
6020
6021  /* Expand the variables recorded during gimple lowering.  */
6022  timevar_push (TV_VAR_EXPAND);
6023  start_sequence ();
6024
6025  var_ret_seq = expand_used_vars ();
6026
6027  var_seq = get_insns ();
6028  end_sequence ();
6029  timevar_pop (TV_VAR_EXPAND);
6030
6031  /* Honor stack protection warnings.  */
6032  if (warn_stack_protect)
6033    {
6034      if (fun->calls_alloca)
6035	warning (OPT_Wstack_protector,
6036		 "stack protector not protecting local variables: "
6037		 "variable length buffer");
6038      if (has_short_buffer && !crtl->stack_protect_guard)
6039	warning (OPT_Wstack_protector,
6040		 "stack protector not protecting function: "
6041		 "all local arrays are less than %d bytes long",
6042		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6043    }
6044
6045  /* Set up parameters and prepare for return, for the function.  */
6046  expand_function_start (current_function_decl);
6047
6048  /* If we emitted any instructions for setting up the variables,
6049     emit them before the FUNCTION_START note.  */
6050  if (var_seq)
6051    {
6052      emit_insn_before (var_seq, parm_birth_insn);
6053
6054      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
6056	 Adjust the pointer to match.  */
6057      parm_birth_insn = var_seq;
6058    }
6059
6060  /* Now that we also have the parameter RTXs, copy them over to our
6061     partitions.  */
6062  for (i = 0; i < SA.map->num_partitions; i++)
6063    {
6064      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
6065
6066      if (TREE_CODE (var) != VAR_DECL
6067	  && !SA.partition_to_pseudo[i])
6068	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
6069      gcc_assert (SA.partition_to_pseudo[i]);
6070
6071      /* If this decl was marked as living in multiple places, reset
6072	 this now to NULL.  */
6073      if (DECL_RTL_IF_SET (var) == pc_rtx)
6074	SET_DECL_RTL (var, NULL);
6075
      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here to make this available, but that would mean
         selecting one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         so that nobody can get at it and hence nobody can call DECL_RTL
         on it.  */
6082      if (!DECL_RTL_SET_P (var))
6083	{
6084	  if (MEM_P (SA.partition_to_pseudo[i]))
6085	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
6086	}
6087    }
6088
  /* If an SSA name partition contains differently aligned pointers, we
     need to merge those alignments into the pointer alignment recorded
     on the corresponding RTL pseudo.  */
6092  for (i = 1; i < num_ssa_names; i++)
6093    {
6094      tree name = ssa_name (i);
6095      int part;
6096      rtx r;
6097
6098      if (!name
6099	  /* We might have generated new SSA names in
             update_alias_info_with_stack_vars.  They will have a NULL
             defining statement and won't be part of the partitioning,
             so ignore those.  */
6103	  || !SSA_NAME_DEF_STMT (name))
6104	continue;
6105      part = var_to_partition (SA.map, name);
6106      if (part == NO_PARTITION)
6107	continue;
6108
6109      /* Adjust all partition members to get the underlying decl of
6110	 the representative which we might have created in expand_one_var.  */
6111      if (SSA_NAME_VAR (name) == NULL_TREE)
6112	{
6113	  tree leader = partition_to_var (SA.map, part);
6114	  gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6115	  replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6116	}
6117      if (!POINTER_TYPE_P (TREE_TYPE (name)))
6118	continue;
6119
6120      r = SA.partition_to_pseudo[part];
6121      if (REG_P (r))
6122	mark_reg_pointer (r, get_pointer_alignment (name));
6123    }
6124
6125  /* If this function is `main', emit a call to `__main'
6126     to run global initializers, etc.  */
6127  if (DECL_NAME (current_function_decl)
6128      && MAIN_NAME_P (DECL_NAME (current_function_decl))
6129      && DECL_FILE_SCOPE_P (current_function_decl))
6130    expand_main_function ();
6131
6132  /* Initialize the stack_protect_guard field.  This must happen after the
6133     call to __main (if any) so that the external decl is initialized.  */
6134  if (crtl->stack_protect_guard)
6135    stack_protect_prologue ();
6136
6137  expand_phi_nodes (&SA);
6138
6139  /* Register rtl specific functions for cfg.  */
6140  rtl_register_cfg_hooks ();
6141
6142  init_block = construct_init_block ();
6143
6144  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
6145     remaining edges later.  */
6146  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6147    e->flags &= ~EDGE_EXECUTABLE;
6148
6149  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6150  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6151		  next_bb)
6152    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6153
6154  if (MAY_HAVE_DEBUG_INSNS)
6155    expand_debug_locations ();
6156
6157  if (deep_ter_debug_map)
6158    {
6159      delete deep_ter_debug_map;
6160      deep_ter_debug_map = NULL;
6161    }
6162
6163  /* Free stuff we no longer need after GIMPLE optimizations.  */
6164  free_dominance_info (CDI_DOMINATORS);
6165  free_dominance_info (CDI_POST_DOMINATORS);
6166  delete_tree_cfg_annotations ();
6167
6168  timevar_push (TV_OUT_OF_SSA);
6169  finish_out_of_ssa (&SA);
6170  timevar_pop (TV_OUT_OF_SSA);
6171
6172  timevar_push (TV_POST_EXPAND);
6173  /* We are no longer in SSA form.  */
6174  fun->gimple_df->in_ssa_p = false;
6175  loops_state_clear (LOOP_CLOSED_SSA);
6176
  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
6179  delete lab_rtx_for_bb;
6180  free_histograms ();
6181
6182  construct_exit_block ();
6183  insn_locations_finalize ();
6184
6185  if (var_ret_seq)
6186    {
6187      rtx_insn *after = return_label;
6188      rtx_insn *next = NEXT_INSN (after);
6189      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6190	after = next;
6191      emit_insn_after (var_ret_seq, after);
6192    }
6193
6194  /* Zap the tree EH table.  */
6195  set_eh_throw_stmt_table (fun, NULL);
6196
  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     to split edges, which edge insertions might do.  */
6199  rebuild_jump_labels (get_insns ());
6200
6201  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6202		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6203    {
6204      edge e;
6205      edge_iterator ei;
6206      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6207	{
6208	  if (e->insns.r)
6209	    {
6210	      rebuild_jump_labels_chain (e->insns.r);
              /* Put insns after parm birth, but before
                 NOTE_INSN_FUNCTION_BEG.  */
6213	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6214		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6215		{
6216		  rtx_insn *insns = e->insns.r;
6217		  e->insns.r = NULL;
6218		  if (NOTE_P (parm_birth_insn)
6219		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6220		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6221		  else
6222		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6223		}
6224	      else
6225		commit_one_edge_insertion (e);
6226	    }
6227	  else
6228	    ei_next (&ei);
6229	}
6230    }
6231
6232  /* We're done expanding trees to RTL.  */
6233  currently_expanding_to_rtl = 0;
6234
6235  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6236		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6237    {
6238      edge e;
6239      edge_iterator ei;
6240      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6241	{
6242	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6243	  e->flags &= ~EDGE_EXECUTABLE;
6244
6245	  /* At the moment not all abnormal edges match the RTL
6246	     representation.  It is safe to remove them here as
6247	     find_many_sub_basic_blocks will rediscover them.
6248	     In the future we should get this fixed properly.  */
6249	  if ((e->flags & EDGE_ABNORMAL)
6250	      && !(e->flags & EDGE_SIBCALL))
6251	    remove_edge (e);
6252	  else
6253	    ei_next (&ei);
6254	}
6255    }
6256
6257  blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6258  bitmap_ones (blocks);
6259  find_many_sub_basic_blocks (blocks);
6260  sbitmap_free (blocks);
6261  purge_all_dead_edges ();
6262
6263  expand_stack_alignment ();
6264
6265  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6266     function.  */
6267  if (crtl->tail_call_emit)
6268    fixup_tail_calls ();
6269
6270  /* After initial rtl generation, call back to finish generating
6271     exception support code.  We need to do this before cleaning up
6272     the CFG as the code does not expect dead landing pads.  */
6273  if (fun->eh->region_tree != NULL)
6274    finish_eh_generation ();
6275
6276  /* Remove unreachable blocks, otherwise we cannot compute dominators
6277     which are needed for loop state verification.  As a side-effect
6278     this also compacts blocks.
6279     ???  We cannot remove trivially dead insns here as for example
6280     the DRAP reg on i?86 is not magically live at this point.
6281     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6282  cleanup_cfg (CLEANUP_NO_INSN_DEL);
6283
6284#ifdef ENABLE_CHECKING
6285  verify_flow_info ();
6286#endif
6287
6288  /* Initialize pseudos allocated for hard registers.  */
6289  emit_initial_value_sets ();
6290
6291  /* And finally unshare all RTL.  */
6292  unshare_all_rtl ();
6293
6294  /* There's no need to defer outputting this function any more; we
6295     know we want to output it.  */
6296  DECL_DEFER_OUTPUT (current_function_decl) = 0;
6297
6298  /* Now that we're done expanding trees to RTL, we shouldn't have any
6299     more CONCATs anywhere.  */
6300  generating_concat_p = 0;
6301
6302  if (dump_file)
6303    {
6304      fprintf (dump_file,
6305	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6306      /* And the pass manager will dump RTL for us.  */
6307    }
6308
6309  /* If we're emitting a nested function, make sure its parent gets
6310     emitted as well.  Doing otherwise confuses debug info.  */
6311    {
6312      tree parent;
6313      for (parent = DECL_CONTEXT (current_function_decl);
6314	   parent != NULL_TREE;
6315	   parent = get_containing_scope (parent))
6316	if (TREE_CODE (parent) == FUNCTION_DECL)
6317	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6318    }
6319
6320  /* We are now committed to emitting code for this function.  Do any
6321     preparation, such as emitting abstract debug info for the inline
6322     before it gets mangled by optimization.  */
6323  if (cgraph_function_possibly_inlined_p (current_function_decl))
6324    (*debug_hooks->outlining_inline_function) (current_function_decl);
6325
6326  TREE_ASM_WRITTEN (current_function_decl) = 1;
6327
6328  /* After expanding, the return labels are no longer needed. */
6329  return_label = NULL;
6330  naked_return_label = NULL;
6331
6332  /* After expanding, the tm_restart map is no longer needed.  */
6333  if (fun->gimple_df->tm_restart)
6334    fun->gimple_df->tm_restart = NULL;
6335
6336  /* Tag the blocks with a depth number so that change_scope can find
6337     the common parent easily.  */
6338  set_block_levels (DECL_INITIAL (fun->decl), 0);
6339  default_rtl_profile ();
6340
6341  timevar_pop (TV_POST_EXPAND);
6342
6343  return 0;
6344}
6345
6346} // anon namespace
6347
6348rtl_opt_pass *
6349make_pass_expand (gcc::context *ctxt)
6350{
6351  return new pass_expand (ctxt);
6352}
6353