/* tree-nested.c (imported from revision 1.7)  */
1/* Nested function decomposition for GIMPLE.
2   Copyright (C) 2004-2016 Free Software Foundation, Inc.
3
4   This file is part of GCC.
5
6   GCC is free software; you can redistribute it and/or modify
7   it under the terms of the GNU General Public License as published by
8   the Free Software Foundation; either version 3, or (at your option)
9   any later version.
10
11   GCC is distributed in the hope that it will be useful,
12   but WITHOUT ANY WARRANTY; without even the implied warranty of
13   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14   GNU General Public License for more details.
15
16   You should have received a copy of the GNU General Public License
17   along with GCC; see the file COPYING3.  If not see
18   <http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "rtl.h"
25#include "tree.h"
26#include "gimple.h"
27#include "tm_p.h"
28#include "stringpool.h"
29#include "cgraph.h"
30#include "fold-const.h"
31#include "stor-layout.h"
32#include "tree-dump.h"
33#include "tree-inline.h"
34#include "gimplify.h"
35#include "gimple-iterator.h"
36#include "gimple-walk.h"
37#include "tree-cfg.h"
38#include "explow.h"
39#include "langhooks.h"
40#include "gimple-low.h"
41#include "gomp-constants.h"
42
43
44/* The object of this pass is to lower the representation of a set of nested
45   functions in order to expose all of the gory details of the various
46   nonlocal references.  We want to do this sooner rather than later, in
47   order to give us more freedom in emitting all of the functions in question.
48
49   Back in olden times, when gcc was young, we developed an insanely
50   complicated scheme whereby variables which were referenced nonlocally
51   were forced to live in the stack of the declaring function, and then
52   the nested functions magically discovered where these variables were
53   placed.  In order for this scheme to function properly, it required
54   that the outer function be partially expanded, then we switch to
55   compiling the inner function, and once done with those we switch back
56   to compiling the outer function.  Such delicate ordering requirements
57   makes it difficult to do whole translation unit optimizations
58   involving such functions.
59
60   The implementation here is much more direct.  Everything that can be
61   referenced by an inner function is a member of an explicitly created
62   structure herein called the "nonlocal frame struct".  The incoming
63   static chain for a nested function is a pointer to this struct in
64   the parent.  In this way, we settle on known offsets from a known
65   base, and so are decoupled from the logic that places objects in the
66   function's stack frame.  More importantly, we don't have to wait for
67   that to happen -- since the compilation of the inner function is no
68   longer tied to a real stack frame, the nonlocal frame struct can be
69   allocated anywhere.  Which means that the outer function is now
70   inlinable.
71
72   Theory of operation here is very simple.  Iterate over all the
73   statements in all the functions (depth first) several times,
74   allocating structures and fields on demand.  In general we want to
75   examine inner functions first, so that we can avoid making changes
76   to outer functions which are unnecessary.
77
78   The order of the passes matters a bit, in that later passes will be
79   skipped if it is discovered that the functions don't actually interact
80   at all.  That is, they're nested in the lexical sense but could have
81   been written as independent functions without change.  */
82
83
struct nesting_info
{
  /* Links in the nesting tree: the enclosing function, the first
     function nested directly inside this one, and the next sibling
     at the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps each nonlocally-referenced decl of this context to its
     FIELD_DECL in the frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps decls to their local replacements: debug decls carrying a
     DECL_VALUE_EXPR (get_nonlocal_debug_decl) and nested-function
     decls to their trampoline fields (lookup_tramp_for_decl).  */
  hash_map<tree, tree> *var_map;
  /* Operand addresses already handled; presumably used to avoid
     rewriting the same memory reference twice -- populated outside
     this chunk, TODO confirm.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs for which expansion to a frame field is suppressed;
     tested in convert_nonlocal_reference_op.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this node describes.  */
  tree context;
  /* Chain of temporaries created via create_tmp_var_for, awaiting
     registration in the function body.  */
  tree new_local_var_chain;
  /* Chain of debug decls created by get_nonlocal_debug_decl.  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the non-local frame struct and the local
     VAR_DECL instantiating it (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Frame field holding the parent's frame pointer (get_chain_field)
     and the parameter-like static chain decl (get_chain_decl).  */
  tree chain_field;
  tree chain_decl;
  /* Frame field holding the non-local goto save area
     (get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if some PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* Bitmask: 1 if the address of our own frame was used as a chain,
     2 if our incoming static chain was used.  */
  char static_chain_added;
};
108
109
110/* Iterate over the nesting tree, starting with ROOT, depth first.  */
111
112static inline struct nesting_info *
113iter_nestinfo_start (struct nesting_info *root)
114{
115  while (root->inner)
116    root = root->inner;
117  return root;
118}
119
120static inline struct nesting_info *
121iter_nestinfo_next (struct nesting_info *node)
122{
123  if (node->next)
124    return iter_nestinfo_start (node->next);
125  return node->outer;
126}
127
/* Depth-first traversal of the nesting tree rooted at ROOT: I visits
   every node, innermost functions first.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
133
134
135/* We're working in so many different function contexts simultaneously,
136   that create_tmp_var is dangerous.  Prevent mishap.  */
137#define create_tmp_var cant_use_create_tmp_var_here_dummy
138
139/* Like create_tmp_var, except record the variable for registration at
140   the given nesting level.  */
141
142static tree
143create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
144{
145  tree tmp_var;
146
147  /* If the type is of variable size or a type which must be created by the
148     frontend, something is wrong.  Note that we explicitly allow
149     incomplete types here, since we create them ourselves here.  */
150  gcc_assert (!TREE_ADDRESSABLE (type));
151  gcc_assert (!TYPE_SIZE_UNIT (type)
152	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
153
154  tmp_var = create_tmp_var_raw (type, prefix);
155  DECL_CONTEXT (tmp_var) = info->context;
156  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
157  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
158  if (TREE_CODE (type) == COMPLEX_TYPE
159      || TREE_CODE (type) == VECTOR_TYPE)
160    DECL_GIMPLE_REG_P (tmp_var) = 1;
161
162  info->new_local_var_chain = tmp_var;
163
164  return tmp_var;
165}
166
167/* Take the address of EXP to be used within function CONTEXT.
168   Mark it for addressability as necessary.  */
169
tree
build_addr (tree exp)
{
  /* Taking the address requires EXP (or its base object) to be marked
     addressable first.  */
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
176
177/* Insert FIELD into TYPE, sorted by alignment requirements.  */
178
179void
180insert_field_into_struct (tree type, tree field)
181{
182  tree *p;
183
184  DECL_CONTEXT (field) = type;
185
186  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
187    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
188      break;
189
190  DECL_CHAIN (field) = *p;
191  *p = field;
192
193  /* Set correct alignment for frame struct type.  */
194  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
195    TYPE_ALIGN (type) = DECL_ALIGN (field);
196}
197
198/* Build or return the RECORD_TYPE that describes the frame state that is
199   shared between INFO->CONTEXT and its nested functions.  This record will
200   not be complete until finalize_nesting_tree; up until that point we'll
201   be adding fields as necessary.
202
203   We also build the DECL that represents this frame in the function.  */
204
205static tree
206get_frame_type (struct nesting_info *info)
207{
208  tree type = info->frame_type;
209  if (!type)
210    {
211      char *name;
212
213      type = make_node (RECORD_TYPE);
214
215      name = concat ("FRAME.",
216		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
217		     NULL);
218      TYPE_NAME (type) = get_identifier (name);
219      free (name);
220
221      info->frame_type = type;
222      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
223      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
224
225      /* ??? Always make it addressable for now, since it is meant to
226	 be pointed to by the static chain pointer.  This pessimizes
227	 when it turns out that no static chains are needed because
228	 the nested functions referencing non-local variables are not
229	 reachable, but the true pessimization is to create the non-
230	 local frame structure in the first place.  */
231      TREE_ADDRESSABLE (info->frame_decl) = 1;
232    }
233  return type;
234}
235
236/* Return true if DECL should be referenced by pointer in the non-local
237   frame structure.  */
238
239static bool
240use_pointer_in_frame (tree decl)
241{
242  if (TREE_CODE (decl) == PARM_DECL)
243    {
244      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
245         sized decls, and inefficient to copy large aggregates.  Don't bother
246         moving anything but scalar variables.  */
247      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
248    }
249  else
250    {
251      /* Variable sized types make things "interesting" in the frame.  */
252      return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
253    }
254}
255
256/* Given DECL, a non-locally accessed variable, find or create a field
257   in the non-local frame structure for the given nesting context.  */
258
259static tree
260lookup_field_for_decl (struct nesting_info *info, tree decl,
261		       enum insert_option insert)
262{
263  if (insert == NO_INSERT)
264    {
265      tree *slot = info->field_map->get (decl);
266      return slot ? *slot : NULL_TREE;
267    }
268
269  tree *slot = &info->field_map->get_or_insert (decl);
270  if (!*slot)
271    {
272      tree field = make_node (FIELD_DECL);
273      DECL_NAME (field) = DECL_NAME (decl);
274
275      if (use_pointer_in_frame (decl))
276	{
277	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
278	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
279	  DECL_NONADDRESSABLE_P (field) = 1;
280	}
281      else
282	{
283          TREE_TYPE (field) = TREE_TYPE (decl);
284          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
285          DECL_ALIGN (field) = DECL_ALIGN (decl);
286          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
287          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
288          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
289          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
290	}
291
292      insert_field_into_struct (get_frame_type (info), field);
293      *slot = field;
294
295      if (TREE_CODE (decl) == PARM_DECL)
296	info->any_parm_remapped = true;
297    }
298
299  return *slot;
300}
301
302/* Build or return the variable that holds the static chain within
303   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
304
static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the enclosing function's non-local
	 frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Report the first time we decide this function needs a static
	 chain, just before recording that fact below.  */
      if (dump_file
          && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
347
348/* Build or return the field within the non-local frame state that holds
349   the static chain for INFO->CONTEXT.  This is the way to walk back up
350   multiple nesting levels.  */
351
352static tree
353get_chain_field (struct nesting_info *info)
354{
355  tree field = info->chain_field;
356
357  if (!field)
358    {
359      tree type = build_pointer_type (get_frame_type (info->outer));
360
361      field = make_node (FIELD_DECL);
362      DECL_NAME (field) = get_identifier ("__chain");
363      TREE_TYPE (field) = type;
364      DECL_ALIGN (field) = TYPE_ALIGN (type);
365      DECL_NONADDRESSABLE_P (field) = 1;
366
367      insert_field_into_struct (get_frame_type (info), field);
368
369      info->chain_field = field;
370
371      if (dump_file
372          && (dump_flags & TDF_DETAILS)
373	  && !DECL_STATIC_CHAIN (info->context))
374	fprintf (dump_file, "Setting static-chain for %s\n",
375		 lang_hooks.decl_printable_name (info->context, 2));
376
377      DECL_STATIC_CHAIN (info->context) = 1;
378    }
379  return field;
380}
381
382/* Initialize a new temporary with the GIMPLE_CALL STMT.  */
383
384static tree
385init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
386		        gcall *call)
387{
388  tree t;
389
390  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
391  gimple_call_set_lhs (call, t);
392  if (! gsi_end_p (*gsi))
393    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
394  gsi_insert_before (gsi, call, GSI_SAME_STMT);
395
396  return t;
397}
398
399
400/* Copy EXP into a temporary.  Allocate the temporary in the context of
401   INFO and insert the initialization statement before GSI.  */
402
403static tree
404init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
405{
406  tree t;
407  gimple *stmt;
408
409  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
410  stmt = gimple_build_assign (t, exp);
411  if (! gsi_end_p (*gsi))
412    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
413  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
414
415  return t;
416}
417
418
419/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
420
421static tree
422gsi_gimplify_val (struct nesting_info *info, tree exp,
423		  gimple_stmt_iterator *gsi)
424{
425  if (is_gimple_val (exp))
426    return exp;
427  else
428    return init_tmp_var (info, exp, gsi);
429}
430
431/* Similarly, but copy from the temporary and insert the statement
432   after the iterator.  */
433
434static tree
435save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
436{
437  tree t;
438  gimple *stmt;
439
440  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
441  stmt = gimple_build_assign (exp, t);
442  if (! gsi_end_p (*gsi))
443    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
444  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
445
446  return t;
447}
448
449/* Build or return the type used to represent a nested function trampoline.  */
450
451static GTY(()) tree trampoline_type;
452
static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The layout depends only on target macros, so one cached type
     serves all functions.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Represent the trampoline as a SIZE-byte char array, wrapped as
     the sole field of a RECORD_TYPE named __builtin_trampoline.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
488
489/* Given DECL, a nested function, find or create a field in the non-local
490   frame structure for a trampoline for this function.  */
491
492static tree
493lookup_tramp_for_decl (struct nesting_info *info, tree decl,
494		       enum insert_option insert)
495{
496  if (insert == NO_INSERT)
497    {
498      tree *slot = info->var_map->get (decl);
499      return slot ? *slot : NULL_TREE;
500    }
501
502  tree *slot = &info->var_map->get_or_insert (decl);
503  if (!*slot)
504    {
505      tree field = make_node (FIELD_DECL);
506      DECL_NAME (field) = DECL_NAME (decl);
507      TREE_TYPE (field) = get_trampoline_type (info);
508      TREE_ADDRESSABLE (field) = 1;
509
510      insert_field_into_struct (get_frame_type (info), field);
511      *slot = field;
512
513      info->any_tramp_created = true;
514    }
515
516  return *slot;
517}
518
519/* Build or return the field within the non-local frame state that holds
520   the non-local goto "jmp_buf".  The buffer itself is maintained by the
521   rtl middle-end as dynamic stack space is allocated.  */
522
static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* One slot per pointer-sized word of the save area, plus one
	 for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      /* The buffer is filled in by the RTL middle-end through its
	 address.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
561
562/* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
563
564static void
565walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
566	   struct nesting_info *info, gimple_seq *pseq)
567{
568  struct walk_stmt_info wi;
569
570  memset (&wi, 0, sizeof (wi));
571  wi.info = info;
572  wi.val_only = true;
573  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
574}
575
576
577/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
578
579static inline void
580walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
581	       struct nesting_info *info)
582{
583  gimple_seq body = gimple_body (info->context);
584  walk_body (callback_stmt, callback_op, info, &body);
585  gimple_set_body (info->context, body);
586}
587
588/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
589
static void
walk_gimple_omp_for (gomp_for *for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence; walk it as
     such.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* WI.GSI collects statements emitted while walking the loop
     operands; they are spliced onto the pre-body afterwards.  SEQ
     starts out empty.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue and must not be replaced by a
	 value.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression: operand 0 is the index
	 (lvalue), operand 1 the step (value).  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Append any statements generated during the walk to the pre-body,
     tagged with the loop's location.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
640
641/* Similarly for ROOT and all functions nested underneath, depth first.  */
642
static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  /* Depth-first: inner functions are walked before the functions that
     enclose them.  */
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}
651
652
653/* We have to check for a fairly pathological case.  The operands of function
654   nested function are to be interpreted in the context of the enclosing
655   function.  So if any are variably-sized, they will get remapped when the
656   enclosing function is inlined.  But that remapping would also have to be
657   done in the types of the PARM_DECLs of the nested function, meaning the
658   argument types of that function will disagree with the arguments in the
659   calls to that function.  So we'd either have to make a copy of the nested
660   function corresponding to each time the enclosing function was inlined or
661   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
662   function.  The former is not practical.  The latter would still require
663   detecting this case to know when to add the conversions.  So, for now at
664   least, we don't inline such an enclosing function.
665
666   We have to do that check recursively, so here return indicating whether
667   FNDECL has such a nested function.  ORIG_FN is the function we were
668   trying to inline to use for checking whether any argument is variably
669   modified by anything in it.
670
671   It would be better to do this in tree-inline.c so that we could give
672   the appropriate warning for why a function can't be inlined, but that's
673   too late since the nesting structure has already been flattened and
674   adding a flag just to record this fact seems a waste of a flag.  */
675
676static bool
677check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
678{
679  struct cgraph_node *cgn = cgraph_node::get (fndecl);
680  tree arg;
681
682  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
683    {
684      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
685	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
686	  return true;
687
688      if (check_for_nested_with_variably_modified (cgn->decl,
689						   orig_fndecl))
690	return true;
691    }
692
693  return false;
694}
695
696/* Construct our local datastructure describing the function nesting
697   tree rooted by CGN.  */
698
699static struct nesting_info *
700create_nesting_tree (struct cgraph_node *cgn)
701{
702  struct nesting_info *info = XCNEW (struct nesting_info);
703  info->field_map = new hash_map<tree, tree>;
704  info->var_map = new hash_map<tree, tree>;
705  info->mem_refs = new hash_set<tree *>;
706  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
707  info->context = cgn->decl;
708
709  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
710    {
711      struct nesting_info *sub = create_nesting_tree (cgn);
712      sub->outer = info;
713      sub->next = info->inner;
714      info->inner = sub;
715    }
716
717  /* See discussion at check_for_nested_with_variably_modified for a
718     discussion of why this has to be here.  */
719  if (check_for_nested_with_variably_modified (info->context, info->context))
720    DECL_UNINLINABLE (info->context) = true;
721
722  return info;
723}
724
725/* Return an expression computing the static chain for TARGET_CONTEXT
726   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
727
728static tree
729get_static_chain (struct nesting_info *info, tree target_context,
730		  gimple_stmt_iterator *gsi)
731{
732  struct nesting_info *i;
733  tree x;
734
735  if (info->context == target_context)
736    {
737      x = build_addr (info->frame_decl);
738      info->static_chain_added |= 1;
739    }
740  else
741    {
742      x = get_chain_decl (info);
743      info->static_chain_added |= 2;
744
745      for (i = info->outer; i->context != target_context; i = i->outer)
746	{
747	  tree field = get_chain_field (i);
748
749	  x = build_simple_mem_ref (x);
750	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
751	  x = init_tmp_var (info, x, gsi);
752	}
753    }
754
755  return x;
756}
757
758
759/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
760   frame as seen from INFO->CONTEXT.  Insert any necessary computations
761   before GSI.  */
762
static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      /* Follow the __chain links up to the target context, loading
	 each intermediate frame pointer into a temporary.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  /* NOTE: this FIELD deliberately shadows the parameter; it is
	     the intermediate frame's chain field, not the field we
	     were asked for.  */
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  /* Finally select FIELD from the target's frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
797
798static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
799
800/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
801   in the nested function with DECL_VALUE_EXPR set to reference the true
802   variable in the parent function.  This is used both for debug info
803   and in OMP lowering.  */
804
static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse a previously built replacement, if any.  */
  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* At this point I designates the nesting level whose frame actually
     holds DECL.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl stands for the frame-access expression X.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Without optimization, the bounds of DECL's variably modified type
     must also be made visible here (see note_nonlocal_vla_type).  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
877
878
879/* Callback for walk_gimple_stmt, rewrite all references to VAR
880   and PARM_DECLs that belong to outer functions.
881
882   The rewrite will involve some number of structure accesses back up
883   the static chain.  E.g. for a variable FOO up one nesting level it'll
884   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
885   indirections apply to decls for which use_pointer_in_frame is true.  */
886
887static tree
888convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
889{
890  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
891  struct nesting_info *const info = (struct nesting_info *) wi->info;
892  tree t = *tp;
893
894  *walk_subtrees = 0;
895  switch (TREE_CODE (t))
896    {
897    case VAR_DECL:
898      /* Non-automatic variables are never processed.  */
899      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
900	break;
901      /* FALLTHRU */
902
903    case PARM_DECL:
904      if (decl_function_context (t) != info->context)
905	{
906	  tree x;
907	  wi->changed = true;
908
909	  x = get_nonlocal_debug_decl (info, t);
910	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
911	    {
912	      tree target_context = decl_function_context (t);
913	      struct nesting_info *i;
914	      for (i = info->outer; i->context != target_context; i = i->outer)
915		continue;
916	      x = lookup_field_for_decl (i, t, INSERT);
917	      x = get_frame_field (info, target_context, x, &wi->gsi);
918	      if (use_pointer_in_frame (t))
919		{
920		  x = init_tmp_var (info, x, &wi->gsi);
921		  x = build_simple_mem_ref (x);
922		}
923	    }
924
925	  if (wi->val_only)
926	    {
927	      if (wi->is_lhs)
928		x = save_tmp_var (info, x, &wi->gsi);
929	      else
930		x = init_tmp_var (info, x, &wi->gsi);
931	    }
932
933	  *tp = x;
934	}
935      break;
936
937    case LABEL_DECL:
938      /* We're taking the address of a label from a parent function, but
939	 this is not itself a non-local goto.  Mark the label such that it
940	 will not be deleted, much as we would with a label address in
941	 static storage.  */
942      if (decl_function_context (t) != info->context)
943        FORCED_LABEL (t) = 1;
944      break;
945
946    case ADDR_EXPR:
947      {
948	bool save_val_only = wi->val_only;
949
950	wi->val_only = false;
951	wi->is_lhs = false;
952	wi->changed = false;
953	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
954	wi->val_only = true;
955
956	if (wi->changed)
957	  {
958	    tree save_context;
959
960	    /* If we changed anything, we might no longer be directly
961	       referencing a decl.  */
962	    save_context = current_function_decl;
963	    current_function_decl = info->context;
964	    recompute_tree_invariant_for_addr_expr (t);
965	    current_function_decl = save_context;
966
967	    /* If the callback converted the address argument in a context
968	       where we only accept variables (and min_invariant, presumably),
969	       then compute the address into a temporary.  */
970	    if (save_val_only)
971	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
972				      t, &wi->gsi);
973	  }
974      }
975      break;
976
977    case REALPART_EXPR:
978    case IMAGPART_EXPR:
979    case COMPONENT_REF:
980    case ARRAY_REF:
981    case ARRAY_RANGE_REF:
982    case BIT_FIELD_REF:
983      /* Go down this entire nest and just look at the final prefix and
984	 anything that describes the references.  Otherwise, we lose track
985	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
986      wi->val_only = true;
987      wi->is_lhs = false;
988      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
989	{
990	  if (TREE_CODE (t) == COMPONENT_REF)
991	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
992		       NULL);
993	  else if (TREE_CODE (t) == ARRAY_REF
994		   || TREE_CODE (t) == ARRAY_RANGE_REF)
995	    {
996	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
997			 wi, NULL);
998	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
999			 wi, NULL);
1000	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1001			 wi, NULL);
1002	    }
1003	}
1004      wi->val_only = false;
1005      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1006      break;
1007
1008    case VIEW_CONVERT_EXPR:
1009      /* Just request to look at the subtrees, leaving val_only and lhs
1010	 untouched.  This might actually be for !val_only + lhs, in which
1011	 case we don't want to force a replacement by a temporary.  */
1012      *walk_subtrees = 1;
1013      break;
1014
1015    default:
1016      if (!IS_TYPE_OR_DECL_P (t))
1017	{
1018	  *walk_subtrees = 1;
1019          wi->val_only = true;
1020	  wi->is_lhs = false;
1021	}
1022      break;
1023    }
1024
1025  return NULL_TREE;
1026}
1027
1028static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1029					     struct walk_stmt_info *);
1030
/* Helper for convert_nonlocal_reference_stmt: rewrite all references to
   VAR and PARM_DECLs that belong to outer functions in the OMP clause
   chain *PCLAUSES.  WI is as in convert_nonlocal_reference_stmt.
   Returns true if any rewritten clause requires the static chain to be
   passed into the region.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Record decls privatized by the clauses below in a copy of the
     suppression bitmap, so the operand callback leaves them alone.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder means there are GIMPLE init/merge sequences
	     attached; those are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* Rewrite the step expression here; the decl itself is then
	     handled like any other decl clause.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      /* PRIVATE makes a fresh copy, so only the other clause
		 kinds force the static chain to be passed in.  */
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
		wi->val_only = true;
		wi->is_lhs = false;
		convert_nonlocal_reference_op
		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  /* The decl may be a bare DECL or a reference expression such
	     as an array section; walk the latter as an operand.  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	  /* These clauses carry no expressions to rewrite.  */
	  break;

	case OMP_CLAUSE_TILE:
	  /* OpenACC tile clauses are discarded during gimplification, so we
	     don't expect to see anything here.  */
	  gcc_unreachable ();

	case OMP_CLAUSE__CACHE_:
	  /* These clauses belong to the OpenACC cache directive, which is
	     discarded during gimplification, so we don't expect to see
	     anything here.  */
	  gcc_unreachable ();

	case OMP_CLAUSE_DEVICE_RESIDENT:
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses.  The reduction placeholders
     temporarily get this function as their context so the rewrite
     treats them as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1271
1272/* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1273
1274static void
1275note_nonlocal_vla_type (struct nesting_info *info, tree type)
1276{
1277  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1278    type = TREE_TYPE (type);
1279
1280  if (TYPE_NAME (type)
1281      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1282      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1283    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1284
1285  while (POINTER_TYPE_P (type)
1286	 || TREE_CODE (type) == VECTOR_TYPE
1287	 || TREE_CODE (type) == FUNCTION_TYPE
1288	 || TREE_CODE (type) == METHOD_TYPE)
1289    type = TREE_TYPE (type);
1290
1291  if (TREE_CODE (type) == ARRAY_TYPE)
1292    {
1293      tree domain, t;
1294
1295      note_nonlocal_vla_type (info, TREE_TYPE (type));
1296      domain = TYPE_DOMAIN (type);
1297      if (domain)
1298	{
1299	  t = TYPE_MIN_VALUE (domain);
1300	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1301	      && decl_function_context (t) != info->context)
1302	    get_nonlocal_debug_decl (info, t);
1303	  t = TYPE_MAX_VALUE (domain);
1304	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1305	      && decl_function_context (t) != info->context)
1306	    get_nonlocal_debug_decl (info, t);
1307	}
1308    }
1309}
1310
1311/* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1312   in BLOCK.  */
1313
1314static void
1315note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1316{
1317  tree var;
1318
1319  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1320    if (TREE_CODE (var) == VAR_DECL
1321	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1322	&& DECL_HAS_VALUE_EXPR_P (var)
1323	&& decl_function_context (var) != info->context)
1324      note_nonlocal_vla_type (info, TREE_TYPE (var));
1325}
1326
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  /* A clause referenced a non-local decl: make the static chain
	     available inside the region via a firstprivate clause.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      /* Temporaries created while rewriting the region body are
	 declared inside the region, not the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions are handled like the simple
	 constructs above; offloaded ones need the chain mapped.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  /* Map the static chain to the offload device so non-local
	     references resolve inside the target region.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      /* As for parallel/task above, new temporaries belong inside the
	 target region.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These constructs carry no clauses; just walk their bodies.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt))	;
      break;

    case GIMPLE_BIND:
      {
      gbind *bind_stmt = as_a <gbind *> (stmt);
      /* At -O0, create debug decls for non-local VLA bounds declared in
	 this block.  */
      if (!optimize && gimple_bind_block (bind_stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

      for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) != info->context)
		  CONSTRUCTOR_ELT (decls, i)->value
		    = get_nonlocal_debug_decl (info, decl);
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1513
1514
1515/* A subroutine of convert_local_reference.  Create a local variable
1516   in the parent function with DECL_VALUE_EXPR set to reference the
1517   field in FRAME.  This is used both for debug info and in OMP
1518   lowering.  */
1519
1520static tree
1521get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1522{
1523  tree x, new_decl;
1524
1525  tree *slot = &info->var_map->get_or_insert (decl);
1526  if (*slot)
1527    return *slot;
1528
1529  /* Make sure frame_decl gets created.  */
1530  (void) get_frame_type (info);
1531  x = info->frame_decl;
1532  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1533
1534  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1535			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1536  DECL_CONTEXT (new_decl) = info->context;
1537  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1538  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1539  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1540  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1541  TREE_READONLY (new_decl) = TREE_READONLY (decl);
1542  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1543  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1544  if ((TREE_CODE (decl) == PARM_DECL
1545       || TREE_CODE (decl) == RESULT_DECL
1546       || TREE_CODE (decl) == VAR_DECL)
1547      && DECL_BY_REFERENCE (decl))
1548    DECL_BY_REFERENCE (new_decl) = 1;
1549
1550  SET_DECL_VALUE_EXPR (new_decl, x);
1551  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1552  *slot = new_decl;
1553
1554  DECL_CHAIN (new_decl) = info->debug_var_chain;
1555  info->debug_var_chain = new_decl;
1556
1557  /* Do not emit debug info twice.  */
1558  DECL_IGNORED_P (decl) = 1;
1559
1560  return new_decl;
1561}
1562
1563
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   TP points at the tree being rewritten; *WALK_SUBTREES and DATA (a
   walk_stmt_info) are as for walk_tree callbacks.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Suppressed decls (privatized by an OMP clause) keep the
	     debug decl; otherwise read through the frame field.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1714
1715static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1716					  struct walk_stmt_info *);
1717
1718/* Helper for convert_local_reference.  Convert all the references in
1719   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1720
1721static bool
1722convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1723{
1724  struct nesting_info *const info = (struct nesting_info *) wi->info;
1725  bool need_frame = false, need_stmts = false;
1726  tree clause, decl;
1727  int dummy;
1728  bitmap new_suppress;
1729
1730  new_suppress = BITMAP_GGC_ALLOC ();
1731  bitmap_copy (new_suppress, info->suppress_expansion);
1732
1733  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1734    {
1735      switch (OMP_CLAUSE_CODE (clause))
1736	{
1737	case OMP_CLAUSE_REDUCTION:
1738	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1739	    need_stmts = true;
1740	  goto do_decl_clause;
1741
1742	case OMP_CLAUSE_LASTPRIVATE:
1743	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1744	    need_stmts = true;
1745	  goto do_decl_clause;
1746
1747	case OMP_CLAUSE_LINEAR:
1748	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1749	    need_stmts = true;
1750	  wi->val_only = true;
1751	  wi->is_lhs = false;
1752	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1753				      wi);
1754	  goto do_decl_clause;
1755
1756	case OMP_CLAUSE_PRIVATE:
1757	case OMP_CLAUSE_FIRSTPRIVATE:
1758	case OMP_CLAUSE_COPYPRIVATE:
1759	case OMP_CLAUSE_SHARED:
1760	case OMP_CLAUSE_TO_DECLARE:
1761	case OMP_CLAUSE_LINK:
1762	case OMP_CLAUSE_USE_DEVICE_PTR:
1763	case OMP_CLAUSE_IS_DEVICE_PTR:
1764	do_decl_clause:
1765	  decl = OMP_CLAUSE_DECL (clause);
1766	  if (TREE_CODE (decl) == VAR_DECL
1767	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1768	    break;
1769	  if (decl_function_context (decl) == info->context
1770	      && !use_pointer_in_frame (decl))
1771	    {
1772	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1773	      if (field)
1774		{
1775		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1776		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1777		  bitmap_set_bit (new_suppress, DECL_UID (decl));
1778		  OMP_CLAUSE_DECL (clause)
1779		    = get_local_debug_decl (info, decl, field);
1780		  need_frame = true;
1781		}
1782	    }
1783	  break;
1784
1785	case OMP_CLAUSE_SCHEDULE:
1786	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1787	    break;
1788	  /* FALLTHRU */
1789	case OMP_CLAUSE_FINAL:
1790	case OMP_CLAUSE_IF:
1791	case OMP_CLAUSE_NUM_THREADS:
1792	case OMP_CLAUSE_DEPEND:
1793	case OMP_CLAUSE_DEVICE:
1794	case OMP_CLAUSE_NUM_TEAMS:
1795	case OMP_CLAUSE_THREAD_LIMIT:
1796	case OMP_CLAUSE_SAFELEN:
1797	case OMP_CLAUSE_SIMDLEN:
1798	case OMP_CLAUSE_PRIORITY:
1799	case OMP_CLAUSE_GRAINSIZE:
1800	case OMP_CLAUSE_NUM_TASKS:
1801	case OMP_CLAUSE_HINT:
1802	case OMP_CLAUSE__CILK_FOR_COUNT_:
1803	case OMP_CLAUSE_NUM_GANGS:
1804	case OMP_CLAUSE_NUM_WORKERS:
1805	case OMP_CLAUSE_VECTOR_LENGTH:
1806	case OMP_CLAUSE_GANG:
1807	case OMP_CLAUSE_WORKER:
1808	case OMP_CLAUSE_VECTOR:
1809	case OMP_CLAUSE_ASYNC:
1810	case OMP_CLAUSE_WAIT:
1811	  /* Several OpenACC clauses have optional arguments.  Check if they
1812	     are present.  */
1813	  if (OMP_CLAUSE_OPERAND (clause, 0))
1814	    {
1815	      wi->val_only = true;
1816	      wi->is_lhs = false;
1817	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1818					  &dummy, wi);
1819	    }
1820
1821	  /* The gang clause accepts two arguments.  */
1822	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1823	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1824	    {
1825		wi->val_only = true;
1826		wi->is_lhs = false;
1827		convert_nonlocal_reference_op
1828		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1829	    }
1830	  break;
1831
1832	case OMP_CLAUSE_DIST_SCHEDULE:
1833	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1834	    {
1835	      wi->val_only = true;
1836	      wi->is_lhs = false;
1837	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1838					  &dummy, wi);
1839	    }
1840	  break;
1841
1842	case OMP_CLAUSE_MAP:
1843	case OMP_CLAUSE_TO:
1844	case OMP_CLAUSE_FROM:
1845	  if (OMP_CLAUSE_SIZE (clause))
1846	    {
1847	      wi->val_only = true;
1848	      wi->is_lhs = false;
1849	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1850					  &dummy, wi);
1851	    }
1852	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1853	    goto do_decl_clause;
1854	  wi->val_only = true;
1855	  wi->is_lhs = false;
1856	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1857		     wi, NULL);
1858	  break;
1859
1860	case OMP_CLAUSE_ALIGNED:
1861	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1862	    {
1863	      wi->val_only = true;
1864	      wi->is_lhs = false;
1865	      convert_local_reference_op
1866		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1867	    }
1868	  /* Like do_decl_clause, but don't add any suppression.  */
1869	  decl = OMP_CLAUSE_DECL (clause);
1870	  if (TREE_CODE (decl) == VAR_DECL
1871	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1872	    break;
1873	  if (decl_function_context (decl) == info->context
1874	      && !use_pointer_in_frame (decl))
1875	    {
1876	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1877	      if (field)
1878		{
1879		  OMP_CLAUSE_DECL (clause)
1880		    = get_local_debug_decl (info, decl, field);
1881		  need_frame = true;
1882		}
1883	    }
1884	  break;
1885
1886	case OMP_CLAUSE_NOWAIT:
1887	case OMP_CLAUSE_ORDERED:
1888	case OMP_CLAUSE_DEFAULT:
1889	case OMP_CLAUSE_COPYIN:
1890	case OMP_CLAUSE_COLLAPSE:
1891	case OMP_CLAUSE_UNTIED:
1892	case OMP_CLAUSE_MERGEABLE:
1893	case OMP_CLAUSE_PROC_BIND:
1894	case OMP_CLAUSE_NOGROUP:
1895	case OMP_CLAUSE_THREADS:
1896	case OMP_CLAUSE_SIMD:
1897	case OMP_CLAUSE_DEFAULTMAP:
1898	case OMP_CLAUSE_SEQ:
1899	case OMP_CLAUSE_INDEPENDENT:
1900	case OMP_CLAUSE_AUTO:
1901	  break;
1902
1903	case OMP_CLAUSE_TILE:
1904	  /* OpenACC tile clauses are discarded during gimplification, so we
1905	     don't expect to see anything here.  */
1906	  gcc_unreachable ();
1907
1908	case OMP_CLAUSE__CACHE_:
1909	  /* These clauses belong to the OpenACC cache directive, which is
1910	     discarded during gimplification, so we don't expect to see
1911	     anything here.  */
1912	  gcc_unreachable ();
1913
1914	case OMP_CLAUSE_DEVICE_RESIDENT:
1915	default:
1916	  gcc_unreachable ();
1917	}
1918    }
1919
1920  info->suppress_expansion = new_suppress;
1921
1922  if (need_stmts)
1923    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1924      switch (OMP_CLAUSE_CODE (clause))
1925	{
1926	case OMP_CLAUSE_REDUCTION:
1927	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1928	    {
1929	      tree old_context
1930		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1931	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1932		= info->context;
1933	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1934		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1935		  = info->context;
1936	      walk_body (convert_local_reference_stmt,
1937			 convert_local_reference_op, info,
1938			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1939	      walk_body (convert_local_reference_stmt,
1940			 convert_local_reference_op, info,
1941			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1942	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1943		= old_context;
1944	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1945		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1946		  = old_context;
1947	    }
1948	  break;
1949
1950	case OMP_CLAUSE_LASTPRIVATE:
1951	  walk_body (convert_local_reference_stmt,
1952		     convert_local_reference_op, info,
1953		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1954	  break;
1955
1956	case OMP_CLAUSE_LINEAR:
1957	  walk_body (convert_local_reference_stmt,
1958		     convert_local_reference_op, info,
1959		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1960	  break;
1961
1962	default:
1963	  break;
1964	}
1965
1966  return need_frame;
1967}
1968
1969
1970/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1971   and PARM_DECLs that were referenced by inner nested functions.
1972   The rewrite will be a structure reference to the local frame variable.  */
1973
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Rewrite the clauses first; if any rewrite required the frame,
	 make the frame object visible inside the region by adding a
	 shared(FRAME) clause.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  /* Bit 2 of static_chain_added records a use of the frame.  */
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the region body with fresh per-region state so we can tell
	 what the body itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* If rewriting the body required the frame but the clause rewrite
	 above did not, add the shared(FRAME) clause now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while rewriting the body must be declared
	 inside the region, not in the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The iteration variables and bounds need rewriting too.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region needs no mapping of the frame;
	 just rewrite clauses and recurse.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded region: like the taskreg case above, except the frame
	 is made visible with a map(tofrom: FRAME) clause instead of
	 shared(FRAME).  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These constructs have no clauses of interest; just recurse.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* The condition operands are rvalues.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* If the lhs of a clobber is a variable that has been moved into
	 the frame, drop the clobber statement entirely by replacing it
	 with a nop.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      /* NAMELIST_DECLs (Fortran) carry a CONSTRUCTOR of associated decls;
	 point any entries that moved into the frame at their local debug
	 decl instead.  */
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2189
2190
2191/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2192   that reference labels from outer functions.  The rewrite will be a
2193   call to __builtin_nonlocal_goto.  */
2194
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  /* Only direct gotos are of interest; anything else is left to the
     generic operand walk.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos (non-LABEL_DECL destinations) are not rewritten.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that owns LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2257
2258
2259/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2260   are referenced via nonlocal goto from a nested function.  The rewrite
2261   will involve installing a newly generated DECL_NONLOCAL label, and
2262   (potentially) a branch around the rtl gunk that is assumed to be
2263   attached to such a label.  */
2264
2265static tree
2266convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2267			  struct walk_stmt_info *wi)
2268{
2269  struct nesting_info *const info = (struct nesting_info *) wi->info;
2270  tree label, new_label;
2271  gimple_stmt_iterator tmp_gsi;
2272  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2273
2274  if (!stmt)
2275    {
2276      *handled_ops_p = false;
2277      return NULL_TREE;
2278    }
2279
2280  label = gimple_label_label (stmt);
2281
2282  tree *slot = info->var_map->get (label);
2283  if (!slot)
2284    {
2285      *handled_ops_p = false;
2286      return NULL_TREE;
2287    }
2288
2289  /* If there's any possibility that the previous statement falls through,
2290     then we must branch around the new non-local label.  */
2291  tmp_gsi = wi->gsi;
2292  gsi_prev (&tmp_gsi);
2293  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2294    {
2295      gimple *stmt = gimple_build_goto (label);
2296      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2297    }
2298
2299  new_label = (tree) *slot;
2300  stmt = gimple_build_label (new_label);
2301  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2302
2303  *handled_ops_p = true;
2304  return NULL_TREE;
2305}
2306
2307
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
2311
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  /* DATA is really the walk_stmt_info of the enclosing statement walk.  */
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original address-of-function with the adjusted
	 trampoline address.  */
      *tp = x;
      break;

    default:
      /* Leave types and decls alone; recurse into other expressions.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2380
2381
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
2386
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no special handling here.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* Trampoline temporaries built inside the region must be declared
	   there, and any frame/chain use the body introduced must be made
	   visible in the region's clauses.  */
	tree save_local_var_chain = info->new_local_var_chain;
        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 of static_chain_added tracks the frame decl, bit 1 the
	   chain decl.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* Taskreg regions: firstprivate the chain, share the
		   frame.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions map the data instead: chain
		   with GOMP_MAP_TO, frame with GOMP_MAP_TOFROM.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2477
2478
2479
2480/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2481   that reference nested functions to make sure that the static chain
2482   is set up properly for the call.  */
2483
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A call that already carries a static chain has been handled.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls get their chain through a trampoline, not here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0 records a use of our own frame (callee is our direct
	     child), bit 1 a use of the chain to some outer function.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the body with fresh state, then make any frame/chain the
	 body needed visible in the region via shared/firstprivate
	 clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Only offloaded target regions need map clauses for the frame and
	 chain; otherwise just recurse.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Chain is mapped to the device only (GOMP_MAP_TO), the
		 frame both ways (GOMP_MAP_TOFROM).  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt))_;
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2604
2605/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2606   call expressions.  At the same time, determine if a nested function
2607   actually uses its static chain; if not, remember that.  */
2608
2609static void
2610convert_all_function_calls (struct nesting_info *root)
2611{
2612  unsigned int chain_count = 0, old_chain_count, iter_count;
2613  struct nesting_info *n;
2614
2615  /* First, optimistically clear static_chain for all decls that haven't
2616     used the static chain already for variable access.  But always create
2617     it if not optimizing.  This makes it possible to reconstruct the static
2618     nesting tree at run time and thus to resolve up-level references from
2619     within the debugger.  */
2620  FOR_EACH_NEST_INFO (n, root)
2621    {
2622      tree decl = n->context;
2623      if (!optimize)
2624	{
2625	  if (n->inner)
2626	    (void) get_frame_type (n);
2627	  if (n->outer)
2628	    (void) get_chain_decl (n);
2629	}
2630      else if (!n->outer || (!n->chain_decl && !n->chain_field))
2631	{
2632	  DECL_STATIC_CHAIN (decl) = 0;
2633	  if (dump_file && (dump_flags & TDF_DETAILS))
2634	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2635		     lang_hooks.decl_printable_name (decl, 2));
2636	}
2637      else
2638	DECL_STATIC_CHAIN (decl) = 1;
2639      chain_count += DECL_STATIC_CHAIN (decl);
2640    }
2641
2642  /* Walk the functions and perform transformations.  Note that these
2643     transformations can induce new uses of the static chain, which in turn
2644     require re-examining all users of the decl.  */
2645  /* ??? It would make sense to try to use the call graph to speed this up,
2646     but the call graph hasn't really been built yet.  Even if it did, we
2647     would still need to iterate in this loop since address-of references
2648     wouldn't show up in the callgraph anyway.  */
2649  iter_count = 0;
2650  do
2651    {
2652      old_chain_count = chain_count;
2653      chain_count = 0;
2654      iter_count++;
2655
2656      if (dump_file && (dump_flags & TDF_DETAILS))
2657	fputc ('\n', dump_file);
2658
2659      FOR_EACH_NEST_INFO (n, root)
2660	{
2661	  tree decl = n->context;
2662	  walk_function (convert_tramp_reference_stmt,
2663			 convert_tramp_reference_op, n);
2664	  walk_function (convert_gimple_call, NULL, n);
2665	  chain_count += DECL_STATIC_CHAIN (decl);
2666	}
2667    }
2668  while (chain_count != old_chain_count);
2669
2670  if (dump_file && (dump_flags & TDF_DETAILS))
2671    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2672	     iter_count);
2673}
2674
/* Callback data used while remapping types and value expressions of
   debug decls: extends the generic copy_body_data with the nesting_info
   whose var_map supplies the decl replacements.  */

struct nesting_copy_body_data
{
  copy_body_data cb;		/* Must be first: nesting_copy_decl
				   downcasts from &cb back to this.  */
  struct nesting_info *root;
};
2680
2681/* A helper subroutine for debug_var_chain type remapping.  */
2682
2683static tree
2684nesting_copy_decl (tree decl, copy_body_data *id)
2685{
2686  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2687  tree *slot = nid->root->var_map->get (decl);
2688
2689  if (slot)
2690    return (tree) *slot;
2691
2692  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2693    {
2694      tree new_decl = copy_decl_no_change (decl, id);
2695      DECL_ORIGINAL_TYPE (new_decl)
2696	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2697      return new_decl;
2698    }
2699
2700  if (TREE_CODE (decl) == VAR_DECL
2701      || TREE_CODE (decl) == PARM_DECL
2702      || TREE_CODE (decl) == RESULT_DECL)
2703    return decl;
2704
2705  return copy_decl_no_change (decl, id);
2706}
2707
2708/* A helper function for remap_vla_decls.  See if *TP contains
2709   some remapped variables.  */
2710
2711static tree
2712contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2713{
2714  struct nesting_info *root = (struct nesting_info *) data;
2715  tree t = *tp;
2716
2717  if (DECL_P (t))
2718    {
2719      *walk_subtrees = 0;
2720      tree *slot = root->var_map->get (t);
2721
2722      if (slot)
2723	return *slot;
2724    }
2725  return NULL;
2726}
2727
2728/* Remap VLA decls in BLOCK and subblocks if remapped variables are
2729   involved.  */
2730
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap first scan: look for any VLA-typed variable whose value
     expression or type mentions a remapped decl, before setting up the
     copy_body machinery.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  /* Nothing affected in this block.  */
  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second scan: resume at the first affected variable and remap its
     type and value expression for real.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info of the function VAR belongs to; skip
	   the variable if it isn't in the chain of enclosing contexts.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the (variably modified) type, then strip matching levels
	   of unnamed pointer types so the TYPE_NAME comparison below looks
	   at corresponding named types.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2825
2826/* Fold the MEM_REF *E.  */
2827bool
2828fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2829{
2830  tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2831  *ref_p = fold (*ref_p);
2832  return true;
2833}
2834
2835/* Do "everything else" to clean up or complete state collected by the various
2836   walking passes -- create a field to hold the frame base address, lay out the
2837   types and decls, generate code to initialize the frame decl, store critical
2838   expressions in the struct function for rtl to find.  */
2839
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Initialization statements built below are prepended to the body of
     CONTEXT at the end of this function.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  /* STMT_LIST is still empty here, so the call that initializes the
	     temporary becomes the first statement of the sequence.  */
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      tree *adjust;
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  /* Only parameters that were actually remapped into the frame
	     have a field; skip the rest.  */
	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  /* FRAME.field = x;  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  /* Static chain value for the nested function: the address of
	     this function's FRAME record.  */
	  arg3 = build_addr (root->frame_decl);

	  /* Address of the nested function itself.  */
	  arg2 = build_addr (i->context);

	  /* Address of the trampoline field inside the FRAME record.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x);

	  /* __builtin_init_trampoline (&FRAME.tramp, &nested_fn, &FRAME).  */
	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend STMT_LIST to the body of the outermost bind.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Walk outward from ROOT to find a context on which
		   TYPE's size actually depends.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Advance NEWT and T in lockstep past unnamed pointer
		   types so the TYPE_NAME comparison below looks at the
		   underlying types.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* If the remapped type still shares its TYPE_DECL with
		   the original type, remap that decl as well.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug variables either to the outermost bind's block
	 or, if it has none, directly to the function's DECL_INITIAL.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3109
3110static void
3111finalize_nesting_tree (struct nesting_info *root)
3112{
3113  struct nesting_info *n;
3114  FOR_EACH_NEST_INFO (n, root)
3115    finalize_nesting_tree_1 (n);
3116}
3117
3118/* Unnest the nodes and pass them to cgraph.  */
3119
3120static void
3121unnest_nesting_tree_1 (struct nesting_info *root)
3122{
3123  struct cgraph_node *node = cgraph_node::get (root->context);
3124
3125  /* For nested functions update the cgraph to reflect unnesting.
3126     We also delay finalizing of these functions up to this point.  */
3127  if (node->origin)
3128    {
3129       node->unnest ();
3130       cgraph_node::finalize_function (root->context, true);
3131    }
3132}
3133
3134static void
3135unnest_nesting_tree (struct nesting_info *root)
3136{
3137  struct nesting_info *n;
3138  FOR_EACH_NEST_INFO (n, root)
3139    unnest_nesting_tree_1 (n);
3140}
3141
3142/* Free the data structures allocated during this pass.  */
3143
3144static void
3145free_nesting_tree (struct nesting_info *root)
3146{
3147  struct nesting_info *node, *next;
3148
3149  node = iter_nestinfo_start (root);
3150  do
3151    {
3152      next = iter_nestinfo_next (node);
3153      delete node->var_map;
3154      delete node->field_map;
3155      delete node->mem_refs;
3156      free (node);
3157      node = next;
3158    }
3159  while (node);
3160}
3161
3162/* Gimplify a function and all its nested functions.  */
3163static void
3164gimplify_all_functions (struct cgraph_node *root)
3165{
3166  struct cgraph_node *iter;
3167  if (!gimple_body (root->decl))
3168    gimplify_function_tree (root->decl);
3169  for (iter = root->nested; iter; iter = iter->next_nested)
3170    gimplify_all_functions (iter);
3171}
3172
3173/* Main entry point for this pass.  Process FNDECL and all of its nested
3174   subroutines and turn them into something less tightly bound.  */
3175
void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* Ensure FNDECL and all of its nested functions have GIMPLE bodies
     before the walks below.  */
  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references to objects of enclosing functions first, then
     the local references that must now go through the frame record.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
		      root);
  /* Lower nonlocal gotos: first the goto sites, then the receivers in
     the functions defining the target labels.  */
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Update call statements, lay out the frame objects and emit the
     code that initializes them, then detach the now-independent
     functions into the cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
3219
3220#include "gt-tree-nested.h"
3221