1/* Nested function decomposition for GIMPLE.
2   Copyright (C) 2004-2020 Free Software Foundation, Inc.
3
4   This file is part of GCC.
5
6   GCC is free software; you can redistribute it and/or modify
7   it under the terms of the GNU General Public License as published by
8   the Free Software Foundation; either version 3, or (at your option)
9   any later version.
10
11   GCC is distributed in the hope that it will be useful,
12   but WITHOUT ANY WARRANTY; without even the implied warranty of
13   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14   GNU General Public License for more details.
15
16   You should have received a copy of the GNU General Public License
17   along with GCC; see the file COPYING3.  If not see
18   <http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "target.h"
25#include "rtl.h"
26#include "tree.h"
27#include "gimple.h"
28#include "memmodel.h"
29#include "tm_p.h"
30#include "stringpool.h"
31#include "cgraph.h"
32#include "fold-const.h"
33#include "stor-layout.h"
34#include "dumpfile.h"
35#include "tree-inline.h"
36#include "gimplify.h"
37#include "gimple-iterator.h"
38#include "gimple-walk.h"
39#include "tree-cfg.h"
40#include "explow.h"
41#include "langhooks.h"
42#include "gimple-low.h"
43#include "gomp-constants.h"
44#include "diagnostic.h"
45
46
47/* The object of this pass is to lower the representation of a set of nested
48   functions in order to expose all of the gory details of the various
49   nonlocal references.  We want to do this sooner rather than later, in
50   order to give us more freedom in emitting all of the functions in question.
51
52   Back in olden times, when gcc was young, we developed an insanely
53   complicated scheme whereby variables which were referenced nonlocally
54   were forced to live in the stack of the declaring function, and then
55   the nested functions magically discovered where these variables were
56   placed.  In order for this scheme to function properly, it required
57   that the outer function be partially expanded, then we switch to
58   compiling the inner function, and once done with those we switch back
59   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
61   involving such functions.
62
63   The implementation here is much more direct.  Everything that can be
64   referenced by an inner function is a member of an explicitly created
65   structure herein called the "nonlocal frame struct".  The incoming
66   static chain for a nested function is a pointer to this struct in
67   the parent.  In this way, we settle on known offsets from a known
68   base, and so are decoupled from the logic that places objects in the
69   function's stack frame.  More importantly, we don't have to wait for
70   that to happen -- since the compilation of the inner function is no
71   longer tied to a real stack frame, the nonlocal frame struct can be
72   allocated anywhere.  Which means that the outer function is now
73   inlinable.
74
75   Theory of operation here is very simple.  Iterate over all the
76   statements in all the functions (depth first) several times,
77   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making
   unnecessary changes to outer functions.
80
81   The order of the passes matters a bit, in that later passes will be
82   skipped if it is discovered that the functions don't actually interact
83   at all.  That is, they're nested in the lexical sense but could have
84   been written as independent functions without change.  */
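
/* As a rough illustration of the transformation (a sketch only; the
   identifiers FRAME_OUTER and CHAIN below are invented for exposition and
   are not the exact names the compiler creates):

	int
	outer (int n)
	{
	  int x = n;
	  int inner (void) { return x + 1; }
	  return inner ();
	}

   is conceptually rewritten as

	struct FRAME_OUTER { int x; };

	static int
	inner (struct FRAME_OUTER *CHAIN)
	{
	  return CHAIN->x + 1;
	}

	int
	outer (int n)
	{
	  struct FRAME_OUTER FRAME;
	  FRAME.x = n;
	  return inner (&FRAME);
	}

   so X lives in the explicitly created frame object and INNER reaches it
   through the static chain pointer passed as a hidden argument.  */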
85
86
87struct nesting_info
88{
89  struct nesting_info *outer;
90  struct nesting_info *inner;
91  struct nesting_info *next;
92
93  hash_map<tree, tree> *field_map;
94  hash_map<tree, tree> *var_map;
95  hash_set<tree *> *mem_refs;
96  bitmap suppress_expansion;
97
98  tree context;
99  tree new_local_var_chain;
100  tree debug_var_chain;
101  tree frame_type;
102  tree frame_decl;
103  tree chain_field;
104  tree chain_decl;
105  tree nl_goto_field;
106
107  bool thunk_p;
108  bool any_parm_remapped;
109  bool any_tramp_created;
110  bool any_descr_created;
111  char static_chain_added;
112};
113
114
115/* Iterate over the nesting tree, starting with ROOT, depth first.  */
116
117static inline struct nesting_info *
118iter_nestinfo_start (struct nesting_info *root)
119{
120  while (root->inner)
121    root = root->inner;
122  return root;
123}
124
125static inline struct nesting_info *
126iter_nestinfo_next (struct nesting_info *node)
127{
128  if (node->next)
129    return iter_nestinfo_start (node->next);
130  return node->outer;
131}
132
133#define FOR_EACH_NEST_INFO(I, ROOT) \
134  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
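
/* For instance (a sketch with invented names): if OUTER contains MID and
   MID contains LEAF, the iteration visits LEAF, then MID, then OUTER, so
   every function is seen only after all of the functions nested within it:

	struct nesting_info *n;
	FOR_EACH_NEST_INFO (n, info_for_outer)
	  handle (n);

   where HANDLE stands for whatever per-function processing a pass does
   (compare walk_all_functions below).  */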
135
136/* Obstack used for the bitmaps in the struct above.  */
137static struct bitmap_obstack nesting_info_bitmap_obstack;
138
139
/* We're working in so many different function contexts simultaneously
141   that create_tmp_var is dangerous.  Prevent mishap.  */
142#define create_tmp_var cant_use_create_tmp_var_here_dummy
143
144/* Like create_tmp_var, except record the variable for registration at
145   the given nesting level.  */
146
147static tree
148create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149{
150  tree tmp_var;
151
152  /* If the type is of variable size or a type which must be created by the
153     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves.  */
155  gcc_assert (!TREE_ADDRESSABLE (type));
156  gcc_assert (!TYPE_SIZE_UNIT (type)
157	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158
159  tmp_var = create_tmp_var_raw (type, prefix);
160  DECL_CONTEXT (tmp_var) = info->context;
161  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163  if (TREE_CODE (type) == COMPLEX_TYPE
164      || TREE_CODE (type) == VECTOR_TYPE)
165    DECL_GIMPLE_REG_P (tmp_var) = 1;
166
167  info->new_local_var_chain = tmp_var;
168
169  return tmp_var;
170}
171
172/* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result.  */
173
174static tree
175build_simple_mem_ref_notrap (tree ptr)
176{
177  tree t = build_simple_mem_ref (ptr);
178  TREE_THIS_NOTRAP (t) = 1;
179  return t;
180}
181
/* Take the address of EXP, marking it addressable as necessary.  */
184
185tree
186build_addr (tree exp)
187{
188  mark_addressable (exp);
189  return build_fold_addr_expr (exp);
190}
191
192/* Insert FIELD into TYPE, sorted by alignment requirements.  */
193
194void
195insert_field_into_struct (tree type, tree field)
196{
197  tree *p;
198
199  DECL_CONTEXT (field) = type;
200
201  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
202    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
203      break;
204
205  DECL_CHAIN (field) = *p;
206  *p = field;
207
208  /* Set correct alignment for frame struct type.  */
209  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
210    SET_TYPE_ALIGN (type, DECL_ALIGN (field));
211}
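
/* For example (hypothetical fields, purely for exposition): if the field
   chain currently reads

	double d; int i;		/- alignments 8 and 4 -/

   then inserting "char c" (alignment 1) appends it at the end, while
   inserting "double e" (alignment 8) places it at the front, so the chain
   stays sorted by non-increasing alignment and TYPE_ALIGN of the record is
   bumped whenever a more strictly aligned field arrives.  */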
212
213/* Build or return the RECORD_TYPE that describes the frame state that is
214   shared between INFO->CONTEXT and its nested functions.  This record will
215   not be complete until finalize_nesting_tree; up until that point we'll
216   be adding fields as necessary.
217
218   We also build the DECL that represents this frame in the function.  */
219
220static tree
221get_frame_type (struct nesting_info *info)
222{
223  tree type = info->frame_type;
224  if (!type)
225    {
226      char *name;
227
228      type = make_node (RECORD_TYPE);
229
230      name = concat ("FRAME.",
231		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
232		     NULL);
233      TYPE_NAME (type) = get_identifier (name);
234      free (name);
235
236      info->frame_type = type;
237
238      /* Do not put info->frame_decl on info->new_local_var_chain,
239	 so that we can declare it in the lexical blocks, which
240	 makes sure virtual regs that end up appearing in its RTL
241	 expression get substituted in instantiate_virtual_regs.  */
242      info->frame_decl = create_tmp_var_raw (type, "FRAME");
243      DECL_CONTEXT (info->frame_decl) = info->context;
244      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
245      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
246
247      /* ??? Always make it addressable for now, since it is meant to
248	 be pointed to by the static chain pointer.  This pessimizes
249	 when it turns out that no static chains are needed because
250	 the nested functions referencing non-local variables are not
251	 reachable, but the true pessimization is to create the non-
252	 local frame structure in the first place.  */
253      TREE_ADDRESSABLE (info->frame_decl) = 1;
254    }
255
256  return type;
257}
258
259/* Return true if DECL should be referenced by pointer in the non-local frame
260   structure.  */
261
262static bool
263use_pointer_in_frame (tree decl)
264{
265  if (TREE_CODE (decl) == PARM_DECL)
266    {
267      /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268	 sized DECLs, and inefficient to copy large aggregates.  Don't bother
269	 moving anything but scalar parameters.  */
270      return AGGREGATE_TYPE_P (TREE_TYPE (decl));
271    }
272  else
273    {
274      /* Variable-sized DECLs can only come from OMP clauses at this point
275	 since the gimplifier has already turned the regular variables into
276	 pointers.  Do the same as the gimplifier.  */
277      return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
278    }
279}
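
/* As a rough sketch of the rule above: a scalar parameter such as "int n"
   that a nested function reads is copied into an "int" field of the frame
   record, whereas an aggregate parameter such as "struct big s" gets a
   "struct big *" field pointing back at the original, and a variable-sized
   decl coming from an OMP clause is likewise tracked through a pointer.  */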
280
281/* Given DECL, a non-locally accessed variable, find or create a field
282   in the non-local frame structure for the given nesting context.  */
283
284static tree
285lookup_field_for_decl (struct nesting_info *info, tree decl,
286		       enum insert_option insert)
287{
288  gcc_checking_assert (decl_function_context (decl) == info->context);
289
290  if (insert == NO_INSERT)
291    {
292      tree *slot = info->field_map->get (decl);
293      return slot ? *slot : NULL_TREE;
294    }
295
296  tree *slot = &info->field_map->get_or_insert (decl);
297  if (!*slot)
298    {
299      tree type = get_frame_type (info);
300      tree field = make_node (FIELD_DECL);
301      DECL_NAME (field) = DECL_NAME (decl);
302
303      if (use_pointer_in_frame (decl))
304	{
305	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
306	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
307	  DECL_NONADDRESSABLE_P (field) = 1;
308	}
309      else
310	{
311          TREE_TYPE (field) = TREE_TYPE (decl);
312          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
313          SET_DECL_ALIGN (field, DECL_ALIGN (decl));
314          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
315          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
316          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
317          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
318
319	  /* Declare the transformation and adjust the original DECL.  For a
320	     variable or for a parameter when not optimizing, we make it point
321	     to the field in the frame directly.  For a parameter, we don't do
322	     it when optimizing because the variable tracking pass will already
	     do the job.  */
324	  if (VAR_P (decl) || !optimize)
325	    {
326	      tree x
327		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
328			  field, NULL_TREE);
329
330	      /* If the next declaration is a PARM_DECL pointing to the DECL,
331		 we need to adjust its VALUE_EXPR directly, since chains of
332		 VALUE_EXPRs run afoul of garbage collection.  This occurs
333		 in Ada for Out parameters that aren't copied in.  */
334	      tree next = DECL_CHAIN (decl);
335	      if (next
336		  && TREE_CODE (next) == PARM_DECL
337		  && DECL_HAS_VALUE_EXPR_P (next)
338		  && DECL_VALUE_EXPR (next) == decl)
339		SET_DECL_VALUE_EXPR (next, x);
340
341	      SET_DECL_VALUE_EXPR (decl, x);
342	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
343	    }
344	}
345
346      insert_field_into_struct (type, field);
347      *slot = field;
348
349      if (TREE_CODE (decl) == PARM_DECL)
350	info->any_parm_remapped = true;
351    }
352
353  return *slot;
354}
355
356/* Build or return the variable that holds the static chain within
357   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
358
359static tree
360get_chain_decl (struct nesting_info *info)
361{
362  tree decl = info->chain_decl;
363
364  if (!decl)
365    {
366      tree type;
367
368      type = get_frame_type (info->outer);
369      type = build_pointer_type (type);
370
371      /* Note that this variable is *not* entered into any BIND_EXPR;
372	 the construction of this variable is handled specially in
373	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is closer
	 to the truth, since the initial value does come from
376	 the caller.  */
377      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
378			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
379      DECL_ARTIFICIAL (decl) = 1;
380      DECL_IGNORED_P (decl) = 1;
381      TREE_USED (decl) = 1;
382      DECL_CONTEXT (decl) = info->context;
383      DECL_ARG_TYPE (decl) = type;
384
385      /* Tell tree-inline.c that we never write to this variable, so
386	 it can copy-prop the replacement value immediately.  */
387      TREE_READONLY (decl) = 1;
388
389      info->chain_decl = decl;
390
391      if (dump_file
392          && (dump_flags & TDF_DETAILS)
393	  && !DECL_STATIC_CHAIN (info->context))
394	fprintf (dump_file, "Setting static-chain for %s\n",
395		 lang_hooks.decl_printable_name (info->context, 2));
396
397      DECL_STATIC_CHAIN (info->context) = 1;
398    }
399  return decl;
400}
401
402/* Build or return the field within the non-local frame state that holds
403   the static chain for INFO->CONTEXT.  This is the way to walk back up
404   multiple nesting levels.  */
405
406static tree
407get_chain_field (struct nesting_info *info)
408{
409  tree field = info->chain_field;
410
411  if (!field)
412    {
413      tree type = build_pointer_type (get_frame_type (info->outer));
414
415      field = make_node (FIELD_DECL);
416      DECL_NAME (field) = get_identifier ("__chain");
417      TREE_TYPE (field) = type;
418      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
419      DECL_NONADDRESSABLE_P (field) = 1;
420
421      insert_field_into_struct (get_frame_type (info), field);
422
423      info->chain_field = field;
424
425      if (dump_file
426          && (dump_flags & TDF_DETAILS)
427	  && !DECL_STATIC_CHAIN (info->context))
428	fprintf (dump_file, "Setting static-chain for %s\n",
429		 lang_hooks.decl_printable_name (info->context, 2));
430
431      DECL_STATIC_CHAIN (info->context) = 1;
432    }
433  return field;
434}
435
/* Initialize a new temporary with the result of the GIMPLE_CALL CALL.  */
437
438static tree
439init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
440		        gcall *call)
441{
442  tree t;
443
444  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
445  gimple_call_set_lhs (call, t);
446  if (! gsi_end_p (*gsi))
447    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
448  gsi_insert_before (gsi, call, GSI_SAME_STMT);
449
450  return t;
451}
452
453
454/* Copy EXP into a temporary.  Allocate the temporary in the context of
455   INFO and insert the initialization statement before GSI.  */
456
457static tree
458init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
459{
460  tree t;
461  gimple *stmt;
462
463  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
464  stmt = gimple_build_assign (t, exp);
465  if (! gsi_end_p (*gsi))
466    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
467  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
468
469  return t;
470}
471
472
473/* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
474
475static tree
476gsi_gimplify_val (struct nesting_info *info, tree exp,
477		  gimple_stmt_iterator *gsi)
478{
479  if (is_gimple_val (exp))
480    return exp;
481  else
482    return init_tmp_var (info, exp, gsi);
483}
484
485/* Similarly, but copy from the temporary and insert the statement
486   after the iterator.  */
487
488static tree
489save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
490{
491  tree t;
492  gimple *stmt;
493
494  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
495  stmt = gimple_build_assign (exp, t);
496  if (! gsi_end_p (*gsi))
497    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
498  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
499
500  return t;
501}
502
503/* Build or return the type used to represent a nested function trampoline.  */
504
505static GTY(()) tree trampoline_type;
506
507static tree
508get_trampoline_type (struct nesting_info *info)
509{
510  unsigned align, size;
511  tree t;
512
513  if (trampoline_type)
514    return trampoline_type;
515
516  align = TRAMPOLINE_ALIGNMENT;
517  size = TRAMPOLINE_SIZE;
518
519  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
520     then allocate extra space so that we can do dynamic alignment.  */
521  if (align > STACK_BOUNDARY)
522    {
523      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
524      align = STACK_BOUNDARY;
525    }
526
527  t = build_index_type (size_int (size - 1));
528  t = build_array_type (char_type_node, t);
529  t = build_decl (DECL_SOURCE_LOCATION (info->context),
530		  FIELD_DECL, get_identifier ("__data"), t);
531  SET_DECL_ALIGN (t, align);
532  DECL_USER_ALIGN (t) = 1;
533
534  trampoline_type = make_node (RECORD_TYPE);
535  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
536  TYPE_FIELDS (trampoline_type) = t;
537  layout_type (trampoline_type);
538  DECL_CONTEXT (t) = trampoline_type;
539
540  return trampoline_type;
541}
542
543/* Build or return the type used to represent a nested function descriptor.  */
544
545static GTY(()) tree descriptor_type;
546
547static tree
548get_descriptor_type (struct nesting_info *info)
549{
550  /* The base alignment is that of a function.  */
551  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
552  tree t;
553
554  if (descriptor_type)
555    return descriptor_type;
556
557  t = build_index_type (integer_one_node);
558  t = build_array_type (ptr_type_node, t);
559  t = build_decl (DECL_SOURCE_LOCATION (info->context),
560		  FIELD_DECL, get_identifier ("__data"), t);
561  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
562  DECL_USER_ALIGN (t) = 1;
563
564  descriptor_type = make_node (RECORD_TYPE);
565  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
566  TYPE_FIELDS (descriptor_type) = t;
567  layout_type (descriptor_type);
568  DECL_CONTEXT (t) = descriptor_type;
569
570  return descriptor_type;
571}
572
573/* Given DECL, a nested function, find or create an element in the
574   var map for this function.  */
575
576static tree
577lookup_element_for_decl (struct nesting_info *info, tree decl,
578			 enum insert_option insert)
579{
580  if (insert == NO_INSERT)
581    {
582      tree *slot = info->var_map->get (decl);
583      return slot ? *slot : NULL_TREE;
584    }
585
586  tree *slot = &info->var_map->get_or_insert (decl);
587  if (!*slot)
588    *slot = build_tree_list (NULL_TREE, NULL_TREE);
589
590  return (tree) *slot;
591}
592
593/* Given DECL, a nested function, create a field in the non-local
594   frame structure for this function.  */
595
596static tree
597create_field_for_decl (struct nesting_info *info, tree decl, tree type)
598{
599  tree field = make_node (FIELD_DECL);
600  DECL_NAME (field) = DECL_NAME (decl);
601  TREE_TYPE (field) = type;
602  TREE_ADDRESSABLE (field) = 1;
603  insert_field_into_struct (get_frame_type (info), field);
604  return field;
605}
606
607/* Given DECL, a nested function, find or create a field in the non-local
608   frame structure for a trampoline for this function.  */
609
610static tree
611lookup_tramp_for_decl (struct nesting_info *info, tree decl,
612		       enum insert_option insert)
613{
614  tree elt, field;
615
616  elt = lookup_element_for_decl (info, decl, insert);
617  if (!elt)
618    return NULL_TREE;
619
620  field = TREE_PURPOSE (elt);
621
622  if (!field && insert == INSERT)
623    {
624      field = create_field_for_decl (info, decl, get_trampoline_type (info));
625      TREE_PURPOSE (elt) = field;
626      info->any_tramp_created = true;
627    }
628
629  return field;
630}
631
632/* Given DECL, a nested function, find or create a field in the non-local
633   frame structure for a descriptor for this function.  */
634
635static tree
636lookup_descr_for_decl (struct nesting_info *info, tree decl,
637		       enum insert_option insert)
638{
639  tree elt, field;
640
641  elt = lookup_element_for_decl (info, decl, insert);
642  if (!elt)
643    return NULL_TREE;
644
645  field = TREE_VALUE (elt);
646
647  if (!field && insert == INSERT)
648    {
649      field = create_field_for_decl (info, decl, get_descriptor_type (info));
650      TREE_VALUE (elt) = field;
651      info->any_descr_created = true;
652    }
653
654  return field;
655}
656
657/* Build or return the field within the non-local frame state that holds
658   the non-local goto "jmp_buf".  The buffer itself is maintained by the
659   rtl middle-end as dynamic stack space is allocated.  */
660
661static tree
662get_nl_goto_field (struct nesting_info *info)
663{
664  tree field = info->nl_goto_field;
665  if (!field)
666    {
667      unsigned size;
668      tree type;
669
670      /* For __builtin_nonlocal_goto, we need N words.  The first is the
671	 frame pointer, the rest is for the target's stack pointer save
672	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
673	 not the best interface, but it'll do for now.  */
674      if (Pmode == ptr_mode)
675	type = ptr_type_node;
676      else
677	type = lang_hooks.types.type_for_mode (Pmode, 1);
678
679      scalar_int_mode mode
680	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
681      size = GET_MODE_SIZE (mode);
682      size = size / GET_MODE_SIZE (Pmode);
683      size = size + 1;
684
685      type = build_array_type
686	(type, build_index_type (size_int (size)));
687
688      field = make_node (FIELD_DECL);
689      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
690      TREE_TYPE (field) = type;
691      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
692      TREE_ADDRESSABLE (field) = 1;
693
694      insert_field_into_struct (get_frame_type (info), field);
695
696      info->nl_goto_field = field;
697    }
698
699  return field;
700}
701
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of the GIMPLE
   sequence *PSEQ.  */
703
704static void
705walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
706	   struct nesting_info *info, gimple_seq *pseq)
707{
708  struct walk_stmt_info wi;
709
710  memset (&wi, 0, sizeof (wi));
711  wi.info = info;
712  wi.val_only = true;
713  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
714}
715
716
717/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
718
719static inline void
720walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
721	       struct nesting_info *info)
722{
723  gimple_seq body = gimple_body (info->context);
724  walk_body (callback_stmt, callback_op, info, &body);
725  gimple_set_body (info->context, body);
726}
727
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr
   and pre-body.  */
729
730static void
731walk_gimple_omp_for (gomp_for *for_stmt,
732    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
733    		     struct nesting_info *info)
734{
735  struct walk_stmt_info wi;
736  gimple_seq seq;
737  tree t;
738  size_t i;
739
740  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
741
742  seq = NULL;
743  memset (&wi, 0, sizeof (wi));
744  wi.info = info;
745  wi.gsi = gsi_last (seq);
746
747  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
748    {
749      wi.val_only = false;
750      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
751		 &wi, NULL);
752      wi.val_only = true;
753      wi.is_lhs = false;
754      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
755		 &wi, NULL);
756
757      wi.val_only = true;
758      wi.is_lhs = false;
759      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
760		 &wi, NULL);
761
762      t = gimple_omp_for_incr (for_stmt, i);
763      gcc_assert (BINARY_CLASS_P (t));
764      wi.val_only = false;
765      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
766      wi.val_only = true;
767      wi.is_lhs = false;
768      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
769    }
770
771  seq = gsi_seq (wi.gsi);
772  if (!gimple_seq_empty_p (seq))
773    {
774      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
775      annotate_all_with_location (seq, gimple_location (for_stmt));
776      gimple_seq_add_seq (&pre_body, seq);
777      gimple_omp_for_set_pre_body (for_stmt, pre_body);
778    }
779}
780
781/* Similarly for ROOT and all functions nested underneath, depth first.  */
782
783static void
784walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
785		    struct nesting_info *root)
786{
787  struct nesting_info *n;
788  FOR_EACH_NEST_INFO (n, root)
789    walk_function (callback_stmt, callback_op, n);
790}
791
792
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
795   function.  So if any are variably-sized, they will get remapped when the
796   enclosing function is inlined.  But that remapping would also have to be
797   done in the types of the PARM_DECLs of the nested function, meaning the
798   argument types of that function will disagree with the arguments in the
799   calls to that function.  So we'd either have to make a copy of the nested
800   function corresponding to each time the enclosing function was inlined or
801   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802   function.  The former is not practical.  The latter would still require
803   detecting this case to know when to add the conversions.  So, for now at
804   least, we don't inline such an enclosing function.
805
   We have to do that check recursively, so here we return an indication of
   whether FNDECL has such a nested function.  ORIG_FNDECL is the function
   we were trying to inline; it is used for checking whether any argument
   is variably modified by anything in it.
810
811   It would be better to do this in tree-inline.c so that we could give
812   the appropriate warning for why a function can't be inlined, but that's
813   too late since the nesting structure has already been flattened and
814   adding a flag just to record this fact seems a waste of a flag.  */
815
816static bool
817check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
818{
819  struct cgraph_node *cgn = cgraph_node::get (fndecl);
820  tree arg;
821
822  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
823    {
824      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
825	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
826	  return true;
827
828      if (check_for_nested_with_variably_modified (cgn->decl,
829						   orig_fndecl))
830	return true;
831    }
832
833  return false;
834}
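
/* A concrete (invented) GNU C instance of the problem described in the
   comment above check_for_nested_with_variably_modified:

	void
	outer (int n)
	{
	  int a[n][n];
	  void inner (int (*p)[n]) { (*p)[0] = 0; }
	  inner (a);
	}

   The type of INNER's parameter P is variably modified by OUTER's
   parameter N, so inlining OUTER would require remapping that type;
   create_nesting_tree below instead simply marks OUTER as uninlinable.  */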
835
/* Construct our local data structure describing the function nesting
837   tree rooted by CGN.  */
838
839static struct nesting_info *
840create_nesting_tree (struct cgraph_node *cgn)
841{
842  struct nesting_info *info = XCNEW (struct nesting_info);
843  info->field_map = new hash_map<tree, tree>;
844  info->var_map = new hash_map<tree, tree>;
845  info->mem_refs = new hash_set<tree *>;
846  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
847  info->context = cgn->decl;
848  info->thunk_p = cgn->thunk.thunk_p;
849
850  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
851    {
852      struct nesting_info *sub = create_nesting_tree (cgn);
853      sub->outer = info;
854      sub->next = info->inner;
855      info->inner = sub;
856    }
857
  /* See the comment at check_for_nested_with_variably_modified for a
859     discussion of why this has to be here.  */
860  if (check_for_nested_with_variably_modified (info->context, info->context))
861    DECL_UNINLINABLE (info->context) = true;
862
863  return info;
864}
865
866/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */
868
869static tree
870get_static_chain (struct nesting_info *info, tree target_context,
871		  gimple_stmt_iterator *gsi)
872{
873  struct nesting_info *i;
874  tree x;
875
876  if (info->context == target_context)
877    {
878      x = build_addr (info->frame_decl);
879      info->static_chain_added |= 1;
880    }
881  else
882    {
883      x = get_chain_decl (info);
884      info->static_chain_added |= 2;
885
886      for (i = info->outer; i->context != target_context; i = i->outer)
887	{
888	  tree field = get_chain_field (i);
889
890	  x = build_simple_mem_ref_notrap (x);
891	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
892	  x = init_tmp_var (info, x, gsi);
893	}
894    }
895
896  return x;
897}
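
/* For instance (a sketch): when TARGET_CONTEXT is INFO->CONTEXT itself the
   result is the address of the local FRAME object; when it is the
   immediate parent, the result is just the incoming CHAIN parameter; and
   for a frame two levels up, roughly

	T.1 = CHAIN->__chain;

   is emitted before GSI and T.1 is returned, with one more such load for
   each additional level.  */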
898
899
900/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
901   frame as seen from INFO->CONTEXT.  Insert any necessary computations
902   before GSI.  */
903
904static tree
905get_frame_field (struct nesting_info *info, tree target_context,
906		 tree field, gimple_stmt_iterator *gsi)
907{
908  struct nesting_info *i;
909  tree x;
910
911  if (info->context == target_context)
912    {
913      /* Make sure frame_decl gets created.  */
914      (void) get_frame_type (info);
915      x = info->frame_decl;
916      info->static_chain_added |= 1;
917    }
918  else
919    {
920      x = get_chain_decl (info);
921      info->static_chain_added |= 2;
922
923      for (i = info->outer; i->context != target_context; i = i->outer)
924	{
925	  tree field = get_chain_field (i);
926
927	  x = build_simple_mem_ref_notrap (x);
928	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
929	  x = init_tmp_var (info, x, gsi);
930	}
931
932      x = build_simple_mem_ref_notrap (x);
933    }
934
935  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
936  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (field);
937  return x;
938}
939
940static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
941
942/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
943   in the nested function with DECL_VALUE_EXPR set to reference the true
944   variable in the parent function.  This is used both for debug info
945   and in OMP lowering.  */
946
947static tree
948get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
949{
950  tree target_context;
951  struct nesting_info *i;
952  tree x, field, new_decl;
953
954  tree *slot = &info->var_map->get_or_insert (decl);
955
956  if (*slot)
957    return *slot;
958
959  target_context = decl_function_context (decl);
960
961  /* A copy of the code in get_frame_field, but without the temporaries.  */
962  if (info->context == target_context)
963    {
964      /* Make sure frame_decl gets created.  */
965      (void) get_frame_type (info);
966      x = info->frame_decl;
967      i = info;
968      info->static_chain_added |= 1;
969    }
970  else
971    {
972      x = get_chain_decl (info);
973      info->static_chain_added |= 2;
974      for (i = info->outer; i->context != target_context; i = i->outer)
975	{
976	  field = get_chain_field (i);
977	  x = build_simple_mem_ref_notrap (x);
978	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
979	}
980      x = build_simple_mem_ref_notrap (x);
981    }
982
983  field = lookup_field_for_decl (i, decl, INSERT);
984  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
985  if (use_pointer_in_frame (decl))
986    x = build_simple_mem_ref_notrap (x);
987
988  /* ??? We should be remapping types as well, surely.  */
989  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
990			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
991  DECL_CONTEXT (new_decl) = info->context;
992  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
993  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
994  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
995  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
996  TREE_READONLY (new_decl) = TREE_READONLY (decl);
997  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
998  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
999  if ((TREE_CODE (decl) == PARM_DECL
1000       || TREE_CODE (decl) == RESULT_DECL
1001       || VAR_P (decl))
1002      && DECL_BY_REFERENCE (decl))
1003    DECL_BY_REFERENCE (new_decl) = 1;
1004
1005  SET_DECL_VALUE_EXPR (new_decl, x);
1006  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1007
1008  *slot = new_decl;
1009  DECL_CHAIN (new_decl) = info->debug_var_chain;
1010  info->debug_var_chain = new_decl;
1011
1012  if (!optimize
1013      && info->context != target_context
1014      && variably_modified_type_p (TREE_TYPE (decl), NULL))
1015    note_nonlocal_vla_type (info, TREE_TYPE (decl));
1016
1017  return new_decl;
1018}
1019
1020
1021/* Callback for walk_gimple_stmt, rewrite all references to VAR
1022   and PARM_DECLs that belong to outer functions.
1023
1024   The rewrite will involve some number of structure accesses back up
1025   the static chain.  E.g. for a variable FOO up one nesting level it'll
1026   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
1027   indirections apply to decls for which use_pointer_in_frame is true.  */
1028
1029static tree
1030convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
1031{
1032  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1033  struct nesting_info *const info = (struct nesting_info *) wi->info;
1034  tree t = *tp;
1035
1036  *walk_subtrees = 0;
1037  switch (TREE_CODE (t))
1038    {
1039    case VAR_DECL:
1040      /* Non-automatic variables are never processed.  */
1041      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1042	break;
1043      /* FALLTHRU */
1044
1045    case PARM_DECL:
1046      {
1047	tree x, target_context = decl_function_context (t);
1048
1049	if (info->context == target_context)
1050	  break;
1051
1052	wi->changed = true;
1053
1054	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1055	  x = get_nonlocal_debug_decl (info, t);
1056	else
1057	  {
1058	    struct nesting_info *i = info;
1059	    while (i && i->context != target_context)
1060	      i = i->outer;
1061	    /* If none of the outer contexts is the target context, this means
1062	       that the VAR or PARM_DECL is referenced in a wrong context.  */
1063	    if (!i)
1064	      internal_error ("%s from %s referenced in %s",
1065			      IDENTIFIER_POINTER (DECL_NAME (t)),
1066			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
1067			      IDENTIFIER_POINTER (DECL_NAME (info->context)));
1068
1069	    x = lookup_field_for_decl (i, t, INSERT);
1070	    x = get_frame_field (info, target_context, x, &wi->gsi);
1071	    if (use_pointer_in_frame (t))
1072	      {
1073		x = init_tmp_var (info, x, &wi->gsi);
1074		x = build_simple_mem_ref_notrap (x);
1075	      }
1076	  }
1077
1078	if (wi->val_only)
1079	  {
1080	    if (wi->is_lhs)
1081	      x = save_tmp_var (info, x, &wi->gsi);
1082	    else
1083	      x = init_tmp_var (info, x, &wi->gsi);
1084	  }
1085
1086	*tp = x;
1087      }
1088      break;
1089
1090    case LABEL_DECL:
1091      /* We're taking the address of a label from a parent function, but
1092	 this is not itself a non-local goto.  Mark the label such that it
1093	 will not be deleted, much as we would with a label address in
1094	 static storage.  */
1095      if (decl_function_context (t) != info->context)
1096        FORCED_LABEL (t) = 1;
1097      break;
1098
1099    case ADDR_EXPR:
1100      {
1101	bool save_val_only = wi->val_only;
1102
1103	wi->val_only = false;
1104	wi->is_lhs = false;
1105	wi->changed = false;
1106	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
1107	wi->val_only = true;
1108
1109	if (wi->changed)
1110	  {
1111	    tree save_context;
1112
1113	    /* If we changed anything, we might no longer be directly
1114	       referencing a decl.  */
1115	    save_context = current_function_decl;
1116	    current_function_decl = info->context;
1117	    recompute_tree_invariant_for_addr_expr (t);
1118	    current_function_decl = save_context;
1119
1120	    /* If the callback converted the address argument in a context
1121	       where we only accept variables (and min_invariant, presumably),
1122	       then compute the address into a temporary.  */
1123	    if (save_val_only)
1124	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1125				      t, &wi->gsi);
1126	  }
1127      }
1128      break;
1129
1130    case REALPART_EXPR:
1131    case IMAGPART_EXPR:
1132    case COMPONENT_REF:
1133    case ARRAY_REF:
1134    case ARRAY_RANGE_REF:
1135    case BIT_FIELD_REF:
1136      /* Go down this entire nest and just look at the final prefix and
1137	 anything that describes the references.  Otherwise, we lose track
1138	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1139      wi->val_only = true;
1140      wi->is_lhs = false;
1141      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1142	{
1143	  if (TREE_CODE (t) == COMPONENT_REF)
1144	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1145		       NULL);
1146	  else if (TREE_CODE (t) == ARRAY_REF
1147		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1148	    {
1149	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1150			 wi, NULL);
1151	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1152			 wi, NULL);
1153	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1154			 wi, NULL);
1155	    }
1156	}
1157      wi->val_only = false;
1158      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1159      break;
1160
1161    case VIEW_CONVERT_EXPR:
1162      /* Just request to look at the subtrees, leaving val_only and lhs
1163	 untouched.  This might actually be for !val_only + lhs, in which
1164	 case we don't want to force a replacement by a temporary.  */
1165      *walk_subtrees = 1;
1166      break;
1167
1168    default:
1169      if (!IS_TYPE_OR_DECL_P (t))
1170	{
1171	  *walk_subtrees = 1;
1172          wi->val_only = true;
1173	  wi->is_lhs = false;
1174	}
1175      break;
1176    }
1177
1178  return NULL_TREE;
1179}
1180
1181static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1182					     struct walk_stmt_info *);
1183
1184/* Helper for convert_nonlocal_references, rewrite all references to VAR
1185   and PARM_DECLs that belong to outer functions.  */
1186
1187static bool
1188convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1189{
1190  struct nesting_info *const info = (struct nesting_info *) wi->info;
1191  bool need_chain = false, need_stmts = false;
1192  tree clause, decl, *pdecl;
1193  int dummy;
1194  bitmap new_suppress;
1195
1196  new_suppress = BITMAP_GGC_ALLOC ();
1197  bitmap_copy (new_suppress, info->suppress_expansion);
1198
1199  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1200    {
1201      pdecl = NULL;
1202      switch (OMP_CLAUSE_CODE (clause))
1203	{
1204	case OMP_CLAUSE_REDUCTION:
1205	case OMP_CLAUSE_IN_REDUCTION:
1206	case OMP_CLAUSE_TASK_REDUCTION:
1207	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1208	    need_stmts = true;
1209	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1210	    {
1211	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1212	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1213		pdecl = &TREE_OPERAND (*pdecl, 0);
1214	      if (TREE_CODE (*pdecl) == INDIRECT_REF
1215		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1216		pdecl = &TREE_OPERAND (*pdecl, 0);
1217	    }
1218	  goto do_decl_clause;
1219
1220	case OMP_CLAUSE_LASTPRIVATE:
1221	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1222	    need_stmts = true;
1223	  goto do_decl_clause;
1224
1225	case OMP_CLAUSE_LINEAR:
1226	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1227	    need_stmts = true;
1228	  wi->val_only = true;
1229	  wi->is_lhs = false;
1230	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1231					 &dummy, wi);
1232	  goto do_decl_clause;
1233
1234	case OMP_CLAUSE_PRIVATE:
1235	case OMP_CLAUSE_FIRSTPRIVATE:
1236	case OMP_CLAUSE_COPYPRIVATE:
1237	case OMP_CLAUSE_SHARED:
1238	case OMP_CLAUSE_TO_DECLARE:
1239	case OMP_CLAUSE_LINK:
1240	case OMP_CLAUSE_USE_DEVICE_PTR:
1241	case OMP_CLAUSE_USE_DEVICE_ADDR:
1242	case OMP_CLAUSE_IS_DEVICE_PTR:
1243	do_decl_clause:
1244	  if (pdecl == NULL)
1245	    pdecl = &OMP_CLAUSE_DECL (clause);
1246	  decl = *pdecl;
1247	  if (VAR_P (decl)
1248	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1249	    break;
1250	  if (decl_function_context (decl) != info->context)
1251	    {
1252	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1253		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1254	      bitmap_set_bit (new_suppress, DECL_UID (decl));
1255	      *pdecl = get_nonlocal_debug_decl (info, decl);
1256	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1257		need_chain = true;
1258	    }
1259	  break;
1260
1261	case OMP_CLAUSE_SCHEDULE:
1262	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1263	    break;
1264	  /* FALLTHRU */
1265	case OMP_CLAUSE_FINAL:
1266	case OMP_CLAUSE_IF:
1267	case OMP_CLAUSE_NUM_THREADS:
1268	case OMP_CLAUSE_DEPEND:
1269	case OMP_CLAUSE_DEVICE:
1270	case OMP_CLAUSE_NUM_TEAMS:
1271	case OMP_CLAUSE_THREAD_LIMIT:
1272	case OMP_CLAUSE_SAFELEN:
1273	case OMP_CLAUSE_SIMDLEN:
1274	case OMP_CLAUSE_PRIORITY:
1275	case OMP_CLAUSE_GRAINSIZE:
1276	case OMP_CLAUSE_NUM_TASKS:
1277	case OMP_CLAUSE_HINT:
1278	case OMP_CLAUSE_NUM_GANGS:
1279	case OMP_CLAUSE_NUM_WORKERS:
1280	case OMP_CLAUSE_VECTOR_LENGTH:
1281	case OMP_CLAUSE_GANG:
1282	case OMP_CLAUSE_WORKER:
1283	case OMP_CLAUSE_VECTOR:
1284	case OMP_CLAUSE_ASYNC:
1285	case OMP_CLAUSE_WAIT:
1286	  /* Several OpenACC clauses have optional arguments.  Check if they
1287	     are present.  */
1288	  if (OMP_CLAUSE_OPERAND (clause, 0))
1289	    {
1290	      wi->val_only = true;
1291	      wi->is_lhs = false;
1292	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1293					     &dummy, wi);
1294	    }
1295
1296	  /* The gang clause accepts two arguments.  */
1297	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1298	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1299	    {
1300		wi->val_only = true;
1301		wi->is_lhs = false;
1302		convert_nonlocal_reference_op
1303		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1304	    }
1305	  break;
1306
1307	case OMP_CLAUSE_DIST_SCHEDULE:
1308	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1309	    {
1310	      wi->val_only = true;
1311	      wi->is_lhs = false;
1312	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1313					     &dummy, wi);
1314	    }
1315	  break;
1316
1317	case OMP_CLAUSE_MAP:
1318	case OMP_CLAUSE_TO:
1319	case OMP_CLAUSE_FROM:
1320	  if (OMP_CLAUSE_SIZE (clause))
1321	    {
1322	      wi->val_only = true;
1323	      wi->is_lhs = false;
1324	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1325					     &dummy, wi);
1326	    }
1327	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
1328	    goto do_decl_clause;
1329	  wi->val_only = true;
1330	  wi->is_lhs = false;
1331	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1332		     wi, NULL);
1333	  break;
1334
1335	case OMP_CLAUSE_ALIGNED:
1336	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1337	    {
1338	      wi->val_only = true;
1339	      wi->is_lhs = false;
1340	      convert_nonlocal_reference_op
1341		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1342	    }
1343	  /* FALLTHRU */
1344	case OMP_CLAUSE_NONTEMPORAL:
1345	  /* Like do_decl_clause, but don't add any suppression.  */
1346	  decl = OMP_CLAUSE_DECL (clause);
1347	  if (VAR_P (decl)
1348	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1349	    break;
1350	  if (decl_function_context (decl) != info->context)
1351	    {
1352	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1353	      need_chain = true;
1354	    }
1355	  break;
1356
1357	case OMP_CLAUSE_NOWAIT:
1358	case OMP_CLAUSE_ORDERED:
1359	case OMP_CLAUSE_DEFAULT:
1360	case OMP_CLAUSE_COPYIN:
1361	case OMP_CLAUSE_COLLAPSE:
1362	case OMP_CLAUSE_TILE:
1363	case OMP_CLAUSE_UNTIED:
1364	case OMP_CLAUSE_MERGEABLE:
1365	case OMP_CLAUSE_PROC_BIND:
1366	case OMP_CLAUSE_NOGROUP:
1367	case OMP_CLAUSE_THREADS:
1368	case OMP_CLAUSE_SIMD:
1369	case OMP_CLAUSE_DEFAULTMAP:
1370	case OMP_CLAUSE_ORDER:
1371	case OMP_CLAUSE_SEQ:
1372	case OMP_CLAUSE_INDEPENDENT:
1373	case OMP_CLAUSE_AUTO:
1374	case OMP_CLAUSE_IF_PRESENT:
1375	case OMP_CLAUSE_FINALIZE:
1376	case OMP_CLAUSE__CONDTEMP_:
1377	case OMP_CLAUSE__SCANTEMP_:
1378	  break;
1379
1380	  /* The following clause belongs to the OpenACC cache directive, which
1381	     is discarded during gimplification.  */
1382	case OMP_CLAUSE__CACHE_:
1383	  /* The following clauses are only allowed in the OpenMP declare simd
1384	     directive, so not seen here.  */
1385	case OMP_CLAUSE_UNIFORM:
1386	case OMP_CLAUSE_INBRANCH:
1387	case OMP_CLAUSE_NOTINBRANCH:
1388	  /* The following clauses are only allowed on OpenMP cancel and
1389	     cancellation point directives, which at this point have already
1390	     been lowered into a function call.  */
1391	case OMP_CLAUSE_FOR:
1392	case OMP_CLAUSE_PARALLEL:
1393	case OMP_CLAUSE_SECTIONS:
1394	case OMP_CLAUSE_TASKGROUP:
1395	  /* The following clauses are only added during OMP lowering; nested
1396	     function decomposition happens before that.  */
1397	case OMP_CLAUSE__LOOPTEMP_:
1398	case OMP_CLAUSE__REDUCTEMP_:
1399	case OMP_CLAUSE__SIMDUID_:
1400	case OMP_CLAUSE__GRIDDIM_:
1401	case OMP_CLAUSE__SIMT_:
1402	  /* Anything else.  */
1403	default:
1404	  gcc_unreachable ();
1405	}
1406    }
1407
1408  info->suppress_expansion = new_suppress;
1409
1410  if (need_stmts)
1411    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1412      switch (OMP_CLAUSE_CODE (clause))
1413	{
1414	case OMP_CLAUSE_REDUCTION:
1415	case OMP_CLAUSE_IN_REDUCTION:
1416	case OMP_CLAUSE_TASK_REDUCTION:
1417	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1418	    {
1419	      tree old_context
1420		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1421	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1422		= info->context;
1423	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1424		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1425		  = info->context;
1426	      tree save_local_var_chain = info->new_local_var_chain;
1427	      info->new_local_var_chain = NULL;
1428	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
1429	      walk_body (convert_nonlocal_reference_stmt,
1430			 convert_nonlocal_reference_op, info, seq);
1431	      if (info->new_local_var_chain)
1432		declare_vars (info->new_local_var_chain,
1433			      gimple_seq_first_stmt (*seq), false);
1434	      info->new_local_var_chain = NULL;
1435	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
1436	      walk_body (convert_nonlocal_reference_stmt,
1437			 convert_nonlocal_reference_op, info, seq);
1438	      if (info->new_local_var_chain)
1439		declare_vars (info->new_local_var_chain,
1440			      gimple_seq_first_stmt (*seq), false);
1441	      info->new_local_var_chain = save_local_var_chain;
1442	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1443		= old_context;
1444	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1445		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1446		  = old_context;
1447	    }
1448	  break;
1449
1450	case OMP_CLAUSE_LASTPRIVATE:
1451	case OMP_CLAUSE_LINEAR:
1452	  {
1453	    tree save_local_var_chain = info->new_local_var_chain;
1454	    info->new_local_var_chain = NULL;
1455	    gimple_seq *seq;
1456	    if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE)
1457	      seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
1458	    else
1459	      seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
1460	    walk_body (convert_nonlocal_reference_stmt,
1461		       convert_nonlocal_reference_op, info, seq);
1462	    if (info->new_local_var_chain)
1463	      {
1464		gimple *g = gimple_seq_first_stmt (*seq);
1465		if (gimple_code (g) != GIMPLE_BIND)
1466		  {
1467		    g = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
1468		    *seq = NULL;
1469		    gimple_seq_add_stmt_without_update (seq, g);
1470		  }
1471		declare_vars (info->new_local_var_chain,
1472			      gimple_seq_first_stmt (*seq), false);
1473	      }
1474	    info->new_local_var_chain = save_local_var_chain;
1475	  }
1476	  break;
1477
1478	default:
1479	  break;
1480	}
1481
1482  return need_chain;
1483}
1484
1485/* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1486
1487static void
1488note_nonlocal_vla_type (struct nesting_info *info, tree type)
1489{
1490  while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1491    type = TREE_TYPE (type);
1492
1493  if (TYPE_NAME (type)
1494      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1495      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1496    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1497
1498  while (POINTER_TYPE_P (type)
1499	 || TREE_CODE (type) == VECTOR_TYPE
1500	 || TREE_CODE (type) == FUNCTION_TYPE
1501	 || TREE_CODE (type) == METHOD_TYPE)
1502    type = TREE_TYPE (type);
1503
1504  if (TREE_CODE (type) == ARRAY_TYPE)
1505    {
1506      tree domain, t;
1507
1508      note_nonlocal_vla_type (info, TREE_TYPE (type));
1509      domain = TYPE_DOMAIN (type);
1510      if (domain)
1511	{
1512	  t = TYPE_MIN_VALUE (domain);
1513	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1514	      && decl_function_context (t) != info->context)
1515	    get_nonlocal_debug_decl (info, t);
1516	  t = TYPE_MAX_VALUE (domain);
1517	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1518	      && decl_function_context (t) != info->context)
1519	    get_nonlocal_debug_decl (info, t);
1520	}
1521    }
1522}
1523
1524/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1525   PARM_DECLs that belong to outer functions.  This handles statements
1526   that are not handled via the standard recursion done in
1527   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1528   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1529   operands of STMT have been handled by this function.  */
1530
1531static tree
1532convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1533				 struct walk_stmt_info *wi)
1534{
1535  struct nesting_info *info = (struct nesting_info *) wi->info;
1536  tree save_local_var_chain;
1537  bitmap save_suppress;
1538  gimple *stmt = gsi_stmt (*gsi);
1539
1540  switch (gimple_code (stmt))
1541    {
1542    case GIMPLE_GOTO:
1543      /* Don't walk non-local gotos for now.  */
1544      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1545	{
1546	  wi->val_only = true;
1547	  wi->is_lhs = false;
1548	  *handled_ops_p = false;
1549	  return NULL_TREE;
1550	}
1551      break;
1552
1553    case GIMPLE_OMP_TEAMS:
1554      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1555	{
1556	  save_suppress = info->suppress_expansion;
1557	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1558					wi);
1559	  walk_body (convert_nonlocal_reference_stmt,
1560		     convert_nonlocal_reference_op, info,
1561		     gimple_omp_body_ptr (stmt));
1562	  info->suppress_expansion = save_suppress;
1563	  break;
1564	}
1565      /* FALLTHRU */
1566
1567    case GIMPLE_OMP_PARALLEL:
1568    case GIMPLE_OMP_TASK:
1569      save_suppress = info->suppress_expansion;
1570      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1571	                                wi))
1572	{
1573	  tree c, decl;
1574	  decl = get_chain_decl (info);
1575	  c = build_omp_clause (gimple_location (stmt),
1576				OMP_CLAUSE_FIRSTPRIVATE);
1577	  OMP_CLAUSE_DECL (c) = decl;
1578	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1579	  gimple_omp_taskreg_set_clauses (stmt, c);
1580	}
1581
1582      save_local_var_chain = info->new_local_var_chain;
1583      info->new_local_var_chain = NULL;
1584
1585      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1586	         info, gimple_omp_body_ptr (stmt));
1587
1588      if (info->new_local_var_chain)
1589	declare_vars (info->new_local_var_chain,
1590	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
1591		      false);
1592      info->new_local_var_chain = save_local_var_chain;
1593      info->suppress_expansion = save_suppress;
1594      break;
1595
1596    case GIMPLE_OMP_FOR:
1597      save_suppress = info->suppress_expansion;
1598      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1599      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1600			   convert_nonlocal_reference_stmt,
1601	  		   convert_nonlocal_reference_op, info);
1602      walk_body (convert_nonlocal_reference_stmt,
1603	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1604      info->suppress_expansion = save_suppress;
1605      break;
1606
1607    case GIMPLE_OMP_SECTIONS:
1608      save_suppress = info->suppress_expansion;
1609      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1610      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1611	         info, gimple_omp_body_ptr (stmt));
1612      info->suppress_expansion = save_suppress;
1613      break;
1614
1615    case GIMPLE_OMP_SINGLE:
1616      save_suppress = info->suppress_expansion;
1617      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1618      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1619	         info, gimple_omp_body_ptr (stmt));
1620      info->suppress_expansion = save_suppress;
1621      break;
1622
1623    case GIMPLE_OMP_TASKGROUP:
1624      save_suppress = info->suppress_expansion;
1625      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1626      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1627		 info, gimple_omp_body_ptr (stmt));
1628      info->suppress_expansion = save_suppress;
1629      break;
1630
1631    case GIMPLE_OMP_TARGET:
1632      if (!is_gimple_omp_offloaded (stmt))
1633	{
1634	  save_suppress = info->suppress_expansion;
1635	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1636					wi);
1637	  info->suppress_expansion = save_suppress;
1638	  walk_body (convert_nonlocal_reference_stmt,
1639		     convert_nonlocal_reference_op, info,
1640		     gimple_omp_body_ptr (stmt));
1641	  break;
1642	}
1643      save_suppress = info->suppress_expansion;
1644      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1645					wi))
1646	{
1647	  tree c, decl;
1648	  decl = get_chain_decl (info);
1649	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1650	  OMP_CLAUSE_DECL (c) = decl;
1651	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1652	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1653	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1654	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1655	}
1656
1657      save_local_var_chain = info->new_local_var_chain;
1658      info->new_local_var_chain = NULL;
1659
1660      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1661		 info, gimple_omp_body_ptr (stmt));
1662
1663      if (info->new_local_var_chain)
1664	declare_vars (info->new_local_var_chain,
1665		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1666		      false);
1667      info->new_local_var_chain = save_local_var_chain;
1668      info->suppress_expansion = save_suppress;
1669      break;
1670
1671    case GIMPLE_OMP_SECTION:
1672    case GIMPLE_OMP_MASTER:
1673    case GIMPLE_OMP_ORDERED:
1674    case GIMPLE_OMP_SCAN:
1675      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1676	         info, gimple_omp_body_ptr (stmt));
1677      break;
1678
1679    case GIMPLE_BIND:
1680      {
1681      gbind *bind_stmt = as_a <gbind *> (stmt);
1682
1683      for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1684	if (TREE_CODE (var) == NAMELIST_DECL)
1685	  {
1686	    /* Adjust decls mentioned in NAMELIST_DECL.  */
1687	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1688	    tree decl;
1689	    unsigned int i;
1690
1691	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1692	      {
1693		if (VAR_P (decl)
1694		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1695		  continue;
1696		if (decl_function_context (decl) != info->context)
1697		  CONSTRUCTOR_ELT (decls, i)->value
1698		    = get_nonlocal_debug_decl (info, decl);
1699	      }
1700	  }
1701
1702      *handled_ops_p = false;
1703      return NULL_TREE;
1704      }
1705    case GIMPLE_COND:
1706      wi->val_only = true;
1707      wi->is_lhs = false;
1708      *handled_ops_p = false;
1709      return NULL_TREE;
1710
1711    case GIMPLE_ASSIGN:
1712      if (gimple_clobber_p (stmt))
1713	{
1714	  tree lhs = gimple_assign_lhs (stmt);
1715	  if (DECL_P (lhs)
1716	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1717	      && decl_function_context (lhs) != info->context)
1718	    {
1719	      gsi_replace (gsi, gimple_build_nop (), true);
1720	      break;
1721	    }
1722	}
1723      *handled_ops_p = false;
1724      return NULL_TREE;
1725
1726    default:
1727      /* For every other statement that we are not interested in
1728	 handling here, let the walker traverse the operands.  */
1729      *handled_ops_p = false;
1730      return NULL_TREE;
1731    }
1732
  /* We have handled all of STMT's operands, so there is no need to
     traverse them again.  */
1734  *handled_ops_p = true;
1735  return NULL_TREE;
1736}
1737
1738
1739/* A subroutine of convert_local_reference.  Create a local variable
1740   in the parent function with DECL_VALUE_EXPR set to reference the
1741   field in FRAME.  This is used both for debug info and in OMP
1742   lowering.  */
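
/* For instance (an illustrative sketch, not lifted from any particular
   testcase): if the parent declares "int i" and some nested function reads
   I, then I lives in the FRAME record, and this routine hands back a new
   artificial VAR_DECL named "i" whose DECL_VALUE_EXPR is FRAME.i, so OMP
   clauses and debug info can keep referring to a plain decl.  */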
1743
1744static tree
1745get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1746{
1747  tree x, new_decl;
1748
1749  tree *slot = &info->var_map->get_or_insert (decl);
1750  if (*slot)
1751    return *slot;
1752
1753  /* Make sure frame_decl gets created.  */
1754  (void) get_frame_type (info);
1755  x = info->frame_decl;
1756  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1757
1758  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1759			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1760  DECL_CONTEXT (new_decl) = info->context;
1761  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1762  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1763  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1764  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1765  TREE_READONLY (new_decl) = TREE_READONLY (decl);
1766  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1767  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1768  if ((TREE_CODE (decl) == PARM_DECL
1769       || TREE_CODE (decl) == RESULT_DECL
1770       || VAR_P (decl))
1771      && DECL_BY_REFERENCE (decl))
1772    DECL_BY_REFERENCE (new_decl) = 1;
1773
1774  SET_DECL_VALUE_EXPR (new_decl, x);
1775  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1776  *slot = new_decl;
1777
1778  DECL_CHAIN (new_decl) = info->debug_var_chain;
1779  info->debug_var_chain = new_decl;
1780
1781  /* Do not emit debug info twice.  */
1782  DECL_IGNORED_P (decl) = 1;
1783
1784  return new_decl;
1785}
1786
1787
/* Called via walk_function+walk_gimple_stmt, rewrite all references to
   VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */
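
/* A hedged example of the effect: if the parent contains

     int i;
     void child (void) { i++; }
     i = 3;

   then after this pass the parent's own store reads roughly

     FRAME.i = 3;

   going through a temporary when the use site only accepts a register
   value (wi->val_only).  */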
1791
1792static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1793
1794static tree
1795convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1796{
1797  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1798  struct nesting_info *const info = (struct nesting_info *) wi->info;
1799  tree t = *tp, field, x;
1800  bool save_val_only;
1801
1802  *walk_subtrees = 0;
1803  switch (TREE_CODE (t))
1804    {
1805    case VAR_DECL:
1806      /* Non-automatic variables are never processed.  */
1807      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1808	break;
1809      /* FALLTHRU */
1810
1811    case PARM_DECL:
1812      if (t != info->frame_decl && decl_function_context (t) == info->context)
1813	{
1814	  /* If we copied a pointer to the frame, then the original decl
1815	     is used unchanged in the parent function.  */
1816	  if (use_pointer_in_frame (t))
1817	    break;
1818
1819	  /* No need to transform anything if no child references the
1820	     variable.  */
1821	  field = lookup_field_for_decl (info, t, NO_INSERT);
1822	  if (!field)
1823	    break;
1824	  wi->changed = true;
1825
1826	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1827	    x = get_local_debug_decl (info, t, field);
1828	  else
1829	    x = get_frame_field (info, info->context, field, &wi->gsi);
1830
1831	  if (wi->val_only)
1832	    {
1833	      if (wi->is_lhs)
1834		x = save_tmp_var (info, x, &wi->gsi);
1835	      else
1836		x = init_tmp_var (info, x, &wi->gsi);
1837	    }
1838
1839	  *tp = x;
1840	}
1841      break;
1842
1843    case ADDR_EXPR:
1844      save_val_only = wi->val_only;
1845      wi->val_only = false;
1846      wi->is_lhs = false;
1847      wi->changed = false;
1848      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1849      wi->val_only = save_val_only;
1850
1851      /* If we converted anything ... */
1852      if (wi->changed)
1853	{
1854	  tree save_context;
1855
1856	  /* Then the frame decl is now addressable.  */
1857	  TREE_ADDRESSABLE (info->frame_decl) = 1;
1858
1859	  save_context = current_function_decl;
1860	  current_function_decl = info->context;
1861	  recompute_tree_invariant_for_addr_expr (t);
1862	  current_function_decl = save_context;
1863
1864	  /* If we are in a context where we only accept values, then
1865	     compute the address into a temporary.  */
1866	  if (save_val_only)
1867	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1868				    t, &wi->gsi);
1869	}
1870      break;
1871
1872    case REALPART_EXPR:
1873    case IMAGPART_EXPR:
1874    case COMPONENT_REF:
1875    case ARRAY_REF:
1876    case ARRAY_RANGE_REF:
1877    case BIT_FIELD_REF:
1878      /* Go down this entire nest and just look at the final prefix and
1879	 anything that describes the references.  Otherwise, we lose track
1880	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1881      save_val_only = wi->val_only;
1882      wi->val_only = true;
1883      wi->is_lhs = false;
1884      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1885	{
1886	  if (TREE_CODE (t) == COMPONENT_REF)
1887	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1888		       NULL);
1889	  else if (TREE_CODE (t) == ARRAY_REF
1890		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1891	    {
1892	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1893			 NULL);
1894	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1895			 NULL);
1896	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1897			 NULL);
1898	    }
1899	}
1900      wi->val_only = false;
1901      walk_tree (tp, convert_local_reference_op, wi, NULL);
1902      wi->val_only = save_val_only;
1903      break;
1904
1905    case MEM_REF:
1906      save_val_only = wi->val_only;
1907      wi->val_only = true;
1908      wi->is_lhs = false;
1909      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1910		 wi, NULL);
      /* We need to re-fold the MEM_REF because component references as
         part of an ADDR_EXPR address are not allowed.  But we cannot
         fold here, as the chain record type is not yet finalized.  */
1914      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1915	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1916	info->mem_refs->add (tp);
1917      wi->val_only = save_val_only;
1918      break;
1919
1920    case VIEW_CONVERT_EXPR:
1921      /* Just request to look at the subtrees, leaving val_only and lhs
1922	 untouched.  This might actually be for !val_only + lhs, in which
1923	 case we don't want to force a replacement by a temporary.  */
1924      *walk_subtrees = 1;
1925      break;
1926
1927    default:
1928      if (!IS_TYPE_OR_DECL_P (t))
1929	{
1930	  *walk_subtrees = 1;
1931	  wi->val_only = true;
1932	  wi->is_lhs = false;
1933	}
1934      break;
1935    }
1936
1937  return NULL_TREE;
1938}
1939
1940static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1941					  struct walk_stmt_info *);
1942
1943/* Helper for convert_local_reference.  Convert all the references in
1944   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
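
/* A rough illustration (not a verbatim dump): given "#pragma omp parallel
   shared (i)" in the parent, where I was moved into the frame because a
   nested function uses it, the clause decl is replaced by the debug decl
   from get_local_debug_decl, expansion of I inside the body is suppressed,
   and the return value tells the caller that the construct now needs the
   FRAME object itself.  */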
1945
1946static bool
1947convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1948{
1949  struct nesting_info *const info = (struct nesting_info *) wi->info;
1950  bool need_frame = false, need_stmts = false;
1951  tree clause, decl, *pdecl;
1952  int dummy;
1953  bitmap new_suppress;
1954
1955  new_suppress = BITMAP_GGC_ALLOC ();
1956  bitmap_copy (new_suppress, info->suppress_expansion);
1957
1958  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1959    {
1960      pdecl = NULL;
1961      switch (OMP_CLAUSE_CODE (clause))
1962	{
1963	case OMP_CLAUSE_REDUCTION:
1964	case OMP_CLAUSE_IN_REDUCTION:
1965	case OMP_CLAUSE_TASK_REDUCTION:
1966	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1967	    need_stmts = true;
1968	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1969	    {
1970	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1971	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1972		pdecl = &TREE_OPERAND (*pdecl, 0);
1973	      if (TREE_CODE (*pdecl) == INDIRECT_REF
1974		  || TREE_CODE (*pdecl) == ADDR_EXPR)
1975		pdecl = &TREE_OPERAND (*pdecl, 0);
1976	    }
1977	  goto do_decl_clause;
1978
1979	case OMP_CLAUSE_LASTPRIVATE:
1980	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1981	    need_stmts = true;
1982	  goto do_decl_clause;
1983
1984	case OMP_CLAUSE_LINEAR:
1985	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1986	    need_stmts = true;
1987	  wi->val_only = true;
1988	  wi->is_lhs = false;
1989	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1990				      wi);
1991	  goto do_decl_clause;
1992
1993	case OMP_CLAUSE_PRIVATE:
1994	case OMP_CLAUSE_FIRSTPRIVATE:
1995	case OMP_CLAUSE_COPYPRIVATE:
1996	case OMP_CLAUSE_SHARED:
1997	case OMP_CLAUSE_TO_DECLARE:
1998	case OMP_CLAUSE_LINK:
1999	case OMP_CLAUSE_USE_DEVICE_PTR:
2000	case OMP_CLAUSE_USE_DEVICE_ADDR:
2001	case OMP_CLAUSE_IS_DEVICE_PTR:
2002	do_decl_clause:
2003	  if (pdecl == NULL)
2004	    pdecl = &OMP_CLAUSE_DECL (clause);
2005	  decl = *pdecl;
2006	  if (VAR_P (decl)
2007	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2008	    break;
2009	  if (decl_function_context (decl) == info->context
2010	      && !use_pointer_in_frame (decl))
2011	    {
2012	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2013	      if (field)
2014		{
2015		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2016		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2017		  bitmap_set_bit (new_suppress, DECL_UID (decl));
2018		  *pdecl = get_local_debug_decl (info, decl, field);
2019		  need_frame = true;
2020		}
2021	    }
2022	  break;
2023
2024	case OMP_CLAUSE_SCHEDULE:
2025	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2026	    break;
2027	  /* FALLTHRU */
2028	case OMP_CLAUSE_FINAL:
2029	case OMP_CLAUSE_IF:
2030	case OMP_CLAUSE_NUM_THREADS:
2031	case OMP_CLAUSE_DEPEND:
2032	case OMP_CLAUSE_DEVICE:
2033	case OMP_CLAUSE_NUM_TEAMS:
2034	case OMP_CLAUSE_THREAD_LIMIT:
2035	case OMP_CLAUSE_SAFELEN:
2036	case OMP_CLAUSE_SIMDLEN:
2037	case OMP_CLAUSE_PRIORITY:
2038	case OMP_CLAUSE_GRAINSIZE:
2039	case OMP_CLAUSE_NUM_TASKS:
2040	case OMP_CLAUSE_HINT:
2041	case OMP_CLAUSE_NUM_GANGS:
2042	case OMP_CLAUSE_NUM_WORKERS:
2043	case OMP_CLAUSE_VECTOR_LENGTH:
2044	case OMP_CLAUSE_GANG:
2045	case OMP_CLAUSE_WORKER:
2046	case OMP_CLAUSE_VECTOR:
2047	case OMP_CLAUSE_ASYNC:
2048	case OMP_CLAUSE_WAIT:
2049	  /* Several OpenACC clauses have optional arguments.  Check if they
2050	     are present.  */
2051	  if (OMP_CLAUSE_OPERAND (clause, 0))
2052	    {
2053	      wi->val_only = true;
2054	      wi->is_lhs = false;
2055	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2056					  &dummy, wi);
2057	    }
2058
2059	  /* The gang clause accepts two arguments.  */
2060	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2061	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2062	    {
2063		wi->val_only = true;
2064		wi->is_lhs = false;
                convert_local_reference_op
                  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2067	    }
2068	  break;
2069
2070	case OMP_CLAUSE_DIST_SCHEDULE:
2071	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2072	    {
2073	      wi->val_only = true;
2074	      wi->is_lhs = false;
2075	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2076					  &dummy, wi);
2077	    }
2078	  break;
2079
2080	case OMP_CLAUSE_MAP:
2081	case OMP_CLAUSE_TO:
2082	case OMP_CLAUSE_FROM:
2083	  if (OMP_CLAUSE_SIZE (clause))
2084	    {
2085	      wi->val_only = true;
2086	      wi->is_lhs = false;
2087	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2088					  &dummy, wi);
2089	    }
2090	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
2091	    goto do_decl_clause;
2092	  wi->val_only = true;
2093	  wi->is_lhs = false;
2094	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2095		     wi, NULL);
2096	  break;
2097
2098	case OMP_CLAUSE_ALIGNED:
2099	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2100	    {
2101	      wi->val_only = true;
2102	      wi->is_lhs = false;
2103	      convert_local_reference_op
2104		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2105	    }
2106	  /* FALLTHRU */
2107	case OMP_CLAUSE_NONTEMPORAL:
2108	  /* Like do_decl_clause, but don't add any suppression.  */
2109	  decl = OMP_CLAUSE_DECL (clause);
2110	  if (VAR_P (decl)
2111	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2112	    break;
2113	  if (decl_function_context (decl) == info->context
2114	      && !use_pointer_in_frame (decl))
2115	    {
2116	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2117	      if (field)
2118		{
2119		  OMP_CLAUSE_DECL (clause)
2120		    = get_local_debug_decl (info, decl, field);
2121		  need_frame = true;
2122		}
2123	    }
2124	  break;
2125
2126	case OMP_CLAUSE_NOWAIT:
2127	case OMP_CLAUSE_ORDERED:
2128	case OMP_CLAUSE_DEFAULT:
2129	case OMP_CLAUSE_COPYIN:
2130	case OMP_CLAUSE_COLLAPSE:
2131	case OMP_CLAUSE_TILE:
2132	case OMP_CLAUSE_UNTIED:
2133	case OMP_CLAUSE_MERGEABLE:
2134	case OMP_CLAUSE_PROC_BIND:
2135	case OMP_CLAUSE_NOGROUP:
2136	case OMP_CLAUSE_THREADS:
2137	case OMP_CLAUSE_SIMD:
2138	case OMP_CLAUSE_DEFAULTMAP:
2139	case OMP_CLAUSE_ORDER:
2140	case OMP_CLAUSE_SEQ:
2141	case OMP_CLAUSE_INDEPENDENT:
2142	case OMP_CLAUSE_AUTO:
2143	case OMP_CLAUSE_IF_PRESENT:
2144	case OMP_CLAUSE_FINALIZE:
2145	case OMP_CLAUSE__CONDTEMP_:
2146	case OMP_CLAUSE__SCANTEMP_:
2147	  break;
2148
2149	  /* The following clause belongs to the OpenACC cache directive, which
2150	     is discarded during gimplification.  */
2151	case OMP_CLAUSE__CACHE_:
2152	  /* The following clauses are only allowed in the OpenMP declare simd
2153	     directive, so not seen here.  */
2154	case OMP_CLAUSE_UNIFORM:
2155	case OMP_CLAUSE_INBRANCH:
2156	case OMP_CLAUSE_NOTINBRANCH:
2157	  /* The following clauses are only allowed on OpenMP cancel and
2158	     cancellation point directives, which at this point have already
2159	     been lowered into a function call.  */
2160	case OMP_CLAUSE_FOR:
2161	case OMP_CLAUSE_PARALLEL:
2162	case OMP_CLAUSE_SECTIONS:
2163	case OMP_CLAUSE_TASKGROUP:
2164	  /* The following clauses are only added during OMP lowering; nested
2165	     function decomposition happens before that.  */
2166	case OMP_CLAUSE__LOOPTEMP_:
2167	case OMP_CLAUSE__REDUCTEMP_:
2168	case OMP_CLAUSE__SIMDUID_:
2169	case OMP_CLAUSE__GRIDDIM_:
2170	case OMP_CLAUSE__SIMT_:
2171	  /* Anything else.  */
2172	default:
2173	  gcc_unreachable ();
2174	}
2175    }
2176
2177  info->suppress_expansion = new_suppress;
2178
2179  if (need_stmts)
2180    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2181      switch (OMP_CLAUSE_CODE (clause))
2182	{
2183	case OMP_CLAUSE_REDUCTION:
2184	case OMP_CLAUSE_IN_REDUCTION:
2185	case OMP_CLAUSE_TASK_REDUCTION:
2186	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2187	    {
2188	      tree old_context
2189		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2190	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2191		= info->context;
2192	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2193		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2194		  = info->context;
2195	      walk_body (convert_local_reference_stmt,
2196			 convert_local_reference_op, info,
2197			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2198	      walk_body (convert_local_reference_stmt,
2199			 convert_local_reference_op, info,
2200			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2201	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2202		= old_context;
2203	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2204		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2205		  = old_context;
2206	    }
2207	  break;
2208
2209	case OMP_CLAUSE_LASTPRIVATE:
2210	  walk_body (convert_local_reference_stmt,
2211		     convert_local_reference_op, info,
2212		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2213	  break;
2214
2215	case OMP_CLAUSE_LINEAR:
2216	  walk_body (convert_local_reference_stmt,
2217		     convert_local_reference_op, info,
2218		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2219	  break;
2220
2221	default:
2222	  break;
2223	}
2224
2225  return need_frame;
2226}
2227
2228
/* Called via walk_function+walk_gimple_stmt, rewrite all references to
   VAR_DECLs and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */
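
/* A hedged example of the statement-level work: a "#pragma omp parallel"
   whose clauses were found above to need the frame gets an additional
   "shared (FRAME)" clause so the FRAME object is visible inside the
   region; an offloaded "#pragma omp target" gets a "map (tofrom: FRAME)"
   clause instead.  */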
2232
2233static tree
2234convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2235			      struct walk_stmt_info *wi)
2236{
2237  struct nesting_info *info = (struct nesting_info *) wi->info;
2238  tree save_local_var_chain;
2239  bitmap save_suppress;
2240  char save_static_chain_added;
2241  bool frame_decl_added;
2242  gimple *stmt = gsi_stmt (*gsi);
2243
2244  switch (gimple_code (stmt))
2245    {
2246    case GIMPLE_OMP_TEAMS:
2247      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2248	{
2249	  save_suppress = info->suppress_expansion;
2250	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2251	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
2252		     info, gimple_omp_body_ptr (stmt));
2253	  info->suppress_expansion = save_suppress;
2254	  break;
2255	}
2256      /* FALLTHRU */
2257
2258    case GIMPLE_OMP_PARALLEL:
2259    case GIMPLE_OMP_TASK:
2260      save_suppress = info->suppress_expansion;
2261      frame_decl_added = false;
2262      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
2263	                             wi))
2264	{
2265	  tree c = build_omp_clause (gimple_location (stmt),
2266				     OMP_CLAUSE_SHARED);
2267	  (void) get_frame_type (info);
2268	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2269	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2270	  gimple_omp_taskreg_set_clauses (stmt, c);
2271	  info->static_chain_added |= 4;
2272	  frame_decl_added = true;
2273	}
2274
2275      save_local_var_chain = info->new_local_var_chain;
2276      save_static_chain_added = info->static_chain_added;
2277      info->new_local_var_chain = NULL;
2278      info->static_chain_added = 0;
2279
2280      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2281	         gimple_omp_body_ptr (stmt));
2282
2283      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2284	{
2285	  tree c = build_omp_clause (gimple_location (stmt),
2286				     OMP_CLAUSE_SHARED);
2287	  (void) get_frame_type (info);
2288	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2289	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2290	  info->static_chain_added |= 4;
2291	  gimple_omp_taskreg_set_clauses (stmt, c);
2292	}
2293      if (info->new_local_var_chain)
2294	declare_vars (info->new_local_var_chain,
2295		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2296      info->new_local_var_chain = save_local_var_chain;
2297      info->suppress_expansion = save_suppress;
2298      info->static_chain_added |= save_static_chain_added;
2299      break;
2300
2301    case GIMPLE_OMP_FOR:
2302      save_suppress = info->suppress_expansion;
2303      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2304      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2305			   convert_local_reference_stmt,
2306			   convert_local_reference_op, info);
2307      walk_body (convert_local_reference_stmt, convert_local_reference_op,
2308		 info, gimple_omp_body_ptr (stmt));
2309      info->suppress_expansion = save_suppress;
2310      break;
2311
2312    case GIMPLE_OMP_SECTIONS:
2313      save_suppress = info->suppress_expansion;
2314      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2315      walk_body (convert_local_reference_stmt, convert_local_reference_op,
2316		 info, gimple_omp_body_ptr (stmt));
2317      info->suppress_expansion = save_suppress;
2318      break;
2319
2320    case GIMPLE_OMP_SINGLE:
2321      save_suppress = info->suppress_expansion;
2322      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2323      walk_body (convert_local_reference_stmt, convert_local_reference_op,
2324		 info, gimple_omp_body_ptr (stmt));
2325      info->suppress_expansion = save_suppress;
2326      break;
2327
2328    case GIMPLE_OMP_TASKGROUP:
2329      save_suppress = info->suppress_expansion;
2330      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
2331      walk_body (convert_local_reference_stmt, convert_local_reference_op,
2332		 info, gimple_omp_body_ptr (stmt));
2333      info->suppress_expansion = save_suppress;
2334      break;
2335
2336    case GIMPLE_OMP_TARGET:
2337      if (!is_gimple_omp_offloaded (stmt))
2338	{
2339	  save_suppress = info->suppress_expansion;
2340	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2341	  info->suppress_expansion = save_suppress;
2342	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
2343		     info, gimple_omp_body_ptr (stmt));
2344	  break;
2345	}
2346      save_suppress = info->suppress_expansion;
2347      frame_decl_added = false;
2348      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2349	{
2350	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2351	  (void) get_frame_type (info);
2352	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2353	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2354	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2355	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2356	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2357	  info->static_chain_added |= 4;
2358	  frame_decl_added = true;
2359	}
2360
2361      save_local_var_chain = info->new_local_var_chain;
2362      save_static_chain_added = info->static_chain_added;
2363      info->new_local_var_chain = NULL;
2364      info->static_chain_added = 0;
2365
2366      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2367		 gimple_omp_body_ptr (stmt));
2368
2369      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
2370	{
2371	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2372	  (void) get_frame_type (info);
2373	  OMP_CLAUSE_DECL (c) = info->frame_decl;
2374	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2375	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2376	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2377	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2378	  info->static_chain_added |= 4;
2379	}
2380
2381      if (info->new_local_var_chain)
2382	declare_vars (info->new_local_var_chain,
2383		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2384      info->new_local_var_chain = save_local_var_chain;
2385      info->suppress_expansion = save_suppress;
2386      info->static_chain_added |= save_static_chain_added;
2387      break;
2388
2389    case GIMPLE_OMP_SECTION:
2390    case GIMPLE_OMP_MASTER:
2391    case GIMPLE_OMP_ORDERED:
2392    case GIMPLE_OMP_SCAN:
2393      walk_body (convert_local_reference_stmt, convert_local_reference_op,
2394		 info, gimple_omp_body_ptr (stmt));
2395      break;
2396
2397    case GIMPLE_COND:
2398      wi->val_only = true;
2399      wi->is_lhs = false;
2400      *handled_ops_p = false;
2401      return NULL_TREE;
2402
2403    case GIMPLE_ASSIGN:
2404      if (gimple_clobber_p (stmt))
2405	{
2406	  tree lhs = gimple_assign_lhs (stmt);
2407	  if (DECL_P (lhs)
2408	      && decl_function_context (lhs) == info->context
2409	      && !use_pointer_in_frame (lhs)
2410	      && lookup_field_for_decl (info, lhs, NO_INSERT))
2411	    {
2412	      gsi_replace (gsi, gimple_build_nop (), true);
2413	      break;
2414	    }
2415	}
2416      *handled_ops_p = false;
2417      return NULL_TREE;
2418
2419    case GIMPLE_BIND:
2420      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2421	   var;
2422	   var = DECL_CHAIN (var))
2423	if (TREE_CODE (var) == NAMELIST_DECL)
2424	  {
2425	    /* Adjust decls mentioned in NAMELIST_DECL.  */
2426	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2427	    tree decl;
2428	    unsigned int i;
2429
2430	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2431	      {
2432		if (VAR_P (decl)
2433		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2434		  continue;
2435		if (decl_function_context (decl) == info->context
2436		    && !use_pointer_in_frame (decl))
2437		  {
2438		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2439		    if (field)
2440		      {
2441			CONSTRUCTOR_ELT (decls, i)->value
2442			  = get_local_debug_decl (info, decl, field);
2443		      }
2444		  }
2445	      }
2446	  }
2447
2448      *handled_ops_p = false;
2449      return NULL_TREE;
2450
2451    default:
2452      /* For every other statement that we are not interested in
2453	 handling here, let the walker traverse the operands.  */
2454      *handled_ops_p = false;
2455      return NULL_TREE;
2456    }
2457
2458  /* Indicate that we have handled all the operands ourselves.  */
2459  *handled_ops_p = true;
2460  return NULL_TREE;
2461}
2462
2463
2464/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2465   that reference labels from outer functions.  The rewrite will be a
2466   call to __builtin_nonlocal_goto.  */
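
/* Illustrative sketch (identifier names invented for the example): in

     void parent (void)
     {
       __label__ lab;
       void child (void) { goto lab; }
       ...
     lab:;
     }

   the "goto lab" in CHILD is rewritten as roughly

     __builtin_nonlocal_goto (&NEW_LAB, &CHAIN->nl_goto_field);

   where NEW_LAB is the fresh DECL_NONLOCAL label that
   convert_nl_goto_receiver installs next to LAB in PARENT.  */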
2467
2468static tree
2469convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2470			   struct walk_stmt_info *wi)
2471{
2472  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2473  tree label, new_label, target_context, x, field;
2474  gcall *call;
2475  gimple *stmt = gsi_stmt (*gsi);
2476
2477  if (gimple_code (stmt) != GIMPLE_GOTO)
2478    {
2479      *handled_ops_p = false;
2480      return NULL_TREE;
2481    }
2482
2483  label = gimple_goto_dest (stmt);
2484  if (TREE_CODE (label) != LABEL_DECL)
2485    {
2486      *handled_ops_p = false;
2487      return NULL_TREE;
2488    }
2489
2490  target_context = decl_function_context (label);
2491  if (target_context == info->context)
2492    {
2493      *handled_ops_p = false;
2494      return NULL_TREE;
2495    }
2496
2497  for (i = info->outer; target_context != i->context; i = i->outer)
2498    continue;
2499
  /* The original user label may also be used for a normal goto, so we must
     create a new label that will actually receive the abnormal control
     transfer.  This new label will be marked DECL_NONLOCAL; this mark will
     trigger proper behavior in the cfg, as well as cause the (hairy
     target-specific) non-local goto receiver code to be generated when we
     expand rtl.  Enter this association into var_map so that we can insert
     the new label into the IL during a second pass.  */
2507  tree *slot = &i->var_map->get_or_insert (label);
2508  if (*slot == NULL)
2509    {
2510      new_label = create_artificial_label (UNKNOWN_LOCATION);
2511      DECL_NONLOCAL (new_label) = 1;
2512      *slot = new_label;
2513    }
2514  else
2515    new_label = *slot;
2516
2517  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
2518  field = get_nl_goto_field (i);
2519  x = get_frame_field (info, target_context, field, gsi);
2520  x = build_addr (x);
2521  x = gsi_gimplify_val (info, x, gsi);
2522  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2523			    2, build_addr (new_label), x);
2524  gsi_replace (gsi, call, false);
2525
2526  /* We have handled all of STMT's operands, no need to keep going.  */
2527  *handled_ops_p = true;
2528  return NULL_TREE;
2529}
2530
2531
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose
   labels are referenced via nonlocal goto from a nested function.  The
   rewrite will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be attached
   to such a label.  */
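
/* Continuing the sketch above, the receiver side in PARENT turns

     lab:;

   into roughly

     goto lab;        (only if the previous statement may fall through)
     NEW_LAB:         (DECL_NONLOCAL)
     lab:;

   so that normal control flow branches around the non-local receiver.  */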
2537
2538static tree
2539convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2540			  struct walk_stmt_info *wi)
2541{
2542  struct nesting_info *const info = (struct nesting_info *) wi->info;
2543  tree label, new_label;
2544  gimple_stmt_iterator tmp_gsi;
2545  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2546
2547  if (!stmt)
2548    {
2549      *handled_ops_p = false;
2550      return NULL_TREE;
2551    }
2552
2553  label = gimple_label_label (stmt);
2554
2555  tree *slot = info->var_map->get (label);
2556  if (!slot)
2557    {
2558      *handled_ops_p = false;
2559      return NULL_TREE;
2560    }
2561
2562  /* If there's any possibility that the previous statement falls through,
2563     then we must branch around the new non-local label.  */
2564  tmp_gsi = wi->gsi;
2565  gsi_prev (&tmp_gsi);
2566  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2567    {
2568      gimple *stmt = gimple_build_goto (label);
2569      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2570    }
2571
2572  new_label = (tree) *slot;
2573  stmt = gimple_build_label (new_label);
2574  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2575
2576  *handled_ops_p = true;
2577  return NULL_TREE;
2578}
2579
2580
/* Called via walk_function+walk_gimple_stmt, rewrite all references to
   addresses of nested functions that require the use of trampolines.  The
   rewrite will involve a reference to a trampoline generated for the
   occasion.  */
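
/* A hedged source-level example: when the parent takes the address of a
   nested function that uses the static chain, as in

     void parent (int n)
     {
       int cmp (const void *a, const void *b) { ... n ... }
       qsort (array, len, size, cmp);
     }

   the address of CMP cannot be a plain function pointer; it is replaced
   by the adjusted address of a trampoline (or descriptor) field in
   PARENT's FRAME, following the sequence sketched in the ADDR_EXPR case
   below.  */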
2584
2585static tree
2586convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2587{
2588  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2589  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2590  tree t = *tp, decl, target_context, x, builtin;
2591  bool descr;
2592  gcall *call;
2593
2594  *walk_subtrees = 0;
2595  switch (TREE_CODE (t))
2596    {
2597    case ADDR_EXPR:
2598      /* Build
2599	   T.1 = &CHAIN->tramp;
2600	   T.2 = __builtin_adjust_trampoline (T.1);
2601	   T.3 = (func_type)T.2;
2602      */
2603
2604      decl = TREE_OPERAND (t, 0);
2605      if (TREE_CODE (decl) != FUNCTION_DECL)
2606	break;
2607
2608      /* Only need to process nested functions.  */
2609      target_context = decl_function_context (decl);
2610      if (!target_context)
2611	break;
2612
2613      /* If the nested function doesn't use a static chain, then
2614	 it doesn't need a trampoline.  */
2615      if (!DECL_STATIC_CHAIN (decl))
2616	break;
2617
2618      /* If we don't want a trampoline, then don't build one.  */
2619      if (TREE_NO_TRAMPOLINE (t))
2620	break;
2621
      /* Look up the immediate parent of the callee, as that's where
         we need to insert the trampoline.  */
2624      for (i = info; i->context != target_context; i = i->outer)
2625	continue;
2626
2627      /* Decide whether to generate a descriptor or a trampoline. */
2628      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;
2629
2630      if (descr)
2631	x = lookup_descr_for_decl (i, decl, INSERT);
2632      else
2633	x = lookup_tramp_for_decl (i, decl, INSERT);
2634
2635      /* Compute the address of the field holding the trampoline.  */
2636      x = get_frame_field (info, target_context, x, &wi->gsi);
2637      x = build_addr (x);
2638      x = gsi_gimplify_val (info, x, &wi->gsi);
2639
2640      /* Do machine-specific ugliness.  Normally this will involve
2641	 computing extra alignment, but it can really be anything.  */
2642      if (descr)
2643	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
2644      else
2645	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2646      call = gimple_build_call (builtin, 1, x);
2647      x = init_tmp_var_with_call (info, &wi->gsi, call);
2648
2649      /* Cast back to the proper function type.  */
2650      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2651      x = init_tmp_var (info, x, &wi->gsi);
2652
2653      *tp = x;
2654      break;
2655
2656    default:
2657      if (!IS_TYPE_OR_DECL_P (t))
2658	*walk_subtrees = 1;
2659      break;
2660    }
2661
2662  return NULL_TREE;
2663}
2664
2665
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
2670
2671static tree
2672convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2673			      struct walk_stmt_info *wi)
2674{
2675  struct nesting_info *info = (struct nesting_info *) wi->info;
2676  gimple *stmt = gsi_stmt (*gsi);
2677
2678  switch (gimple_code (stmt))
2679    {
2680    case GIMPLE_CALL:
2681      {
2682	/* Only walk call arguments, lest we generate trampolines for
2683	   direct calls.  */
2684	unsigned long i, nargs = gimple_call_num_args (stmt);
2685	for (i = 0; i < nargs; i++)
2686	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2687		     wi, NULL);
2688	break;
2689      }
2690
2691    case GIMPLE_OMP_TEAMS:
2692      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2693	{
2694	  *handled_ops_p = false;
2695	  return NULL_TREE;
2696	}
2697      goto do_parallel;
2698
2699    case GIMPLE_OMP_TARGET:
2700      if (!is_gimple_omp_offloaded (stmt))
2701	{
2702	  *handled_ops_p = false;
2703	  return NULL_TREE;
2704	}
2705      /* FALLTHRU */
2706    case GIMPLE_OMP_PARALLEL:
2707    case GIMPLE_OMP_TASK:
2708    do_parallel:
2709      {
2710	tree save_local_var_chain = info->new_local_var_chain;
2711        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2712	info->new_local_var_chain = NULL;
2713	char save_static_chain_added = info->static_chain_added;
2714	info->static_chain_added = 0;
2715        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2716		   info, gimple_omp_body_ptr (stmt));
2717	if (info->new_local_var_chain)
2718	  declare_vars (info->new_local_var_chain,
2719			gimple_seq_first_stmt (gimple_omp_body (stmt)),
2720			false);
2721	for (int i = 0; i < 2; i++)
2722	  {
2723	    tree c, decl;
2724	    if ((info->static_chain_added & (1 << i)) == 0)
2725	      continue;
2726	    decl = i ? get_chain_decl (info) : info->frame_decl;
2727	    /* Don't add CHAIN.* or FRAME.* twice.  */
2728	    for (c = gimple_omp_taskreg_clauses (stmt);
2729		 c;
2730		 c = OMP_CLAUSE_CHAIN (c))
2731	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2732		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2733		  && OMP_CLAUSE_DECL (c) == decl)
2734		break;
2735	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2736	      {
2737		c = build_omp_clause (gimple_location (stmt),
2738				      i ? OMP_CLAUSE_FIRSTPRIVATE
2739				      : OMP_CLAUSE_SHARED);
2740		OMP_CLAUSE_DECL (c) = decl;
2741		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2742		gimple_omp_taskreg_set_clauses (stmt, c);
2743	      }
2744	    else if (c == NULL)
2745	      {
2746		c = build_omp_clause (gimple_location (stmt),
2747				      OMP_CLAUSE_MAP);
2748		OMP_CLAUSE_DECL (c) = decl;
2749		OMP_CLAUSE_SET_MAP_KIND (c,
2750					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2751		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2752		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2753		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2754					       c);
2755	      }
2756	  }
2757	info->new_local_var_chain = save_local_var_chain;
2758	info->static_chain_added |= save_static_chain_added;
2759      }
2760      break;
2761
2762    default:
2763      *handled_ops_p = false;
2764      return NULL_TREE;
2765    }
2766
2767  *handled_ops_p = true;
2768  return NULL_TREE;
2769}
2770
2771
2772
2773/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2774   that reference nested functions to make sure that the static chain
2775   is set up properly for the call.  */
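
/* For instance (illustrative only): a direct call "child ()" in the parent,
   where CHILD really uses its static chain, gets its gimple static-chain
   operand set to &FRAME, or to a chain pointer fetched from an outer frame
   when the callee belongs to a more distant ancestor.  */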
2776
2777static tree
2778convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2779                     struct walk_stmt_info *wi)
2780{
2781  struct nesting_info *const info = (struct nesting_info *) wi->info;
2782  tree decl, target_context;
2783  char save_static_chain_added;
2784  int i;
2785  gimple *stmt = gsi_stmt (*gsi);
2786
2787  switch (gimple_code (stmt))
2788    {
2789    case GIMPLE_CALL:
2790      if (gimple_call_chain (stmt))
2791	break;
2792      decl = gimple_call_fndecl (stmt);
2793      if (!decl)
2794	break;
2795      target_context = decl_function_context (decl);
2796      if (target_context && DECL_STATIC_CHAIN (decl))
2797	{
2798	  struct nesting_info *i = info;
2799	  while (i && i->context != target_context)
2800	    i = i->outer;
          /* If none of the outer contexts is the target context, this means
             that the function is called in the wrong context.  */
2803	  if (!i)
2804	    internal_error ("%s from %s called in %s",
2805			    IDENTIFIER_POINTER (DECL_NAME (decl)),
2806			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
2807			    IDENTIFIER_POINTER (DECL_NAME (info->context)));
2808
2809	  gimple_call_set_chain (as_a <gcall *> (stmt),
2810				 get_static_chain (info, target_context,
2811						   &wi->gsi));
2812	  info->static_chain_added |= (1 << (info->context != target_context));
2813	}
2814      break;
2815
2816    case GIMPLE_OMP_TEAMS:
2817      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
2818	{
2819	  walk_body (convert_gimple_call, NULL, info,
2820		     gimple_omp_body_ptr (stmt));
2821	  break;
2822	}
2823      /* FALLTHRU */
2824
2825    case GIMPLE_OMP_PARALLEL:
2826    case GIMPLE_OMP_TASK:
2827      save_static_chain_added = info->static_chain_added;
2828      info->static_chain_added = 0;
2829      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2830      for (i = 0; i < 2; i++)
2831	{
2832	  tree c, decl;
2833	  if ((info->static_chain_added & (1 << i)) == 0)
2834	    continue;
2835	  decl = i ? get_chain_decl (info) : info->frame_decl;
2836	  /* Don't add CHAIN.* or FRAME.* twice.  */
2837	  for (c = gimple_omp_taskreg_clauses (stmt);
2838	       c;
2839	       c = OMP_CLAUSE_CHAIN (c))
2840	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2841		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2842		&& OMP_CLAUSE_DECL (c) == decl)
2843	      break;
2844	  if (c == NULL)
2845	    {
2846	      c = build_omp_clause (gimple_location (stmt),
2847				    i ? OMP_CLAUSE_FIRSTPRIVATE
2848				    : OMP_CLAUSE_SHARED);
2849	      OMP_CLAUSE_DECL (c) = decl;
2850	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2851	      gimple_omp_taskreg_set_clauses (stmt, c);
2852	    }
2853	}
2854      info->static_chain_added |= save_static_chain_added;
2855      break;
2856
2857    case GIMPLE_OMP_TARGET:
2858      if (!is_gimple_omp_offloaded (stmt))
2859	{
2860	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2861	  break;
2862	}
2863      save_static_chain_added = info->static_chain_added;
2864      info->static_chain_added = 0;
2865      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2866      for (i = 0; i < 2; i++)
2867	{
2868	  tree c, decl;
2869	  if ((info->static_chain_added & (1 << i)) == 0)
2870	    continue;
2871	  decl = i ? get_chain_decl (info) : info->frame_decl;
2872	  /* Don't add CHAIN.* or FRAME.* twice.  */
2873	  for (c = gimple_omp_target_clauses (stmt);
2874	       c;
2875	       c = OMP_CLAUSE_CHAIN (c))
2876	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2877		&& OMP_CLAUSE_DECL (c) == decl)
2878	      break;
2879	  if (c == NULL)
2880	    {
2881	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2882	      OMP_CLAUSE_DECL (c) = decl;
2883	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2884	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2885	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2886	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2887					     c);
2888	    }
2889	}
2890      info->static_chain_added |= save_static_chain_added;
2891      break;
2892
2893    case GIMPLE_OMP_FOR:
2894      walk_body (convert_gimple_call, NULL, info,
2895	  	 gimple_omp_for_pre_body_ptr (stmt));
2896      /* FALLTHRU */
2897    case GIMPLE_OMP_SECTIONS:
2898    case GIMPLE_OMP_SECTION:
2899    case GIMPLE_OMP_SINGLE:
2900    case GIMPLE_OMP_MASTER:
2901    case GIMPLE_OMP_TASKGROUP:
2902    case GIMPLE_OMP_ORDERED:
2903    case GIMPLE_OMP_SCAN:
2904    case GIMPLE_OMP_CRITICAL:
2905      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2906      break;
2907
2908    default:
2909      /* Keep looking for other operands.  */
2910      *handled_ops_p = false;
2911      return NULL_TREE;
2912    }
2913
2914  *handled_ops_p = true;
2915  return NULL_TREE;
2916}
2917
2918/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2919   call expressions.  At the same time, determine if a nested function
2920   actually uses its static chain; if not, remember that.  */
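
/* Why this iterates to a fixed point, informally: materializing a
   trampoline for an inner function forces that function to keep its static
   chain, which in turn can force its parent to build a frame and take a
   chain of its own, invalidating an earlier "no static chain needed" guess;
   so we keep rewriting until the number of functions with DECL_STATIC_CHAIN
   set stops changing.  */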
2921
2922static void
2923convert_all_function_calls (struct nesting_info *root)
2924{
2925  unsigned int chain_count = 0, old_chain_count, iter_count;
2926  struct nesting_info *n;
2927
2928  /* First, optimistically clear static_chain for all decls that haven't
2929     used the static chain already for variable access.  But always create
2930     it if not optimizing.  This makes it possible to reconstruct the static
2931     nesting tree at run time and thus to resolve up-level references from
2932     within the debugger.  */
2933  FOR_EACH_NEST_INFO (n, root)
2934    {
2935      if (n->thunk_p)
2936	continue;
2937      tree decl = n->context;
2938      if (!optimize)
2939	{
2940	  if (n->inner)
2941	    (void) get_frame_type (n);
2942	  if (n->outer)
2943	    (void) get_chain_decl (n);
2944	}
2945      else if (!n->outer || (!n->chain_decl && !n->chain_field))
2946	{
2947	  DECL_STATIC_CHAIN (decl) = 0;
2948	  if (dump_file && (dump_flags & TDF_DETAILS))
2949	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2950		     lang_hooks.decl_printable_name (decl, 2));
2951	}
2952      else
2953	DECL_STATIC_CHAIN (decl) = 1;
2954      chain_count += DECL_STATIC_CHAIN (decl);
2955    }
2956
2957  FOR_EACH_NEST_INFO (n, root)
2958    if (n->thunk_p)
2959      {
2960	tree decl = n->context;
2961	tree alias = cgraph_node::get (decl)->thunk.alias;
2962	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
2963      }
2964
2965  /* Walk the functions and perform transformations.  Note that these
2966     transformations can induce new uses of the static chain, which in turn
2967     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it had, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
2972  iter_count = 0;
2973  do
2974    {
2975      old_chain_count = chain_count;
2976      chain_count = 0;
2977      iter_count++;
2978
2979      if (dump_file && (dump_flags & TDF_DETAILS))
2980	fputc ('\n', dump_file);
2981
2982      FOR_EACH_NEST_INFO (n, root)
2983	{
2984	  if (n->thunk_p)
2985	    continue;
2986	  tree decl = n->context;
2987	  walk_function (convert_tramp_reference_stmt,
2988			 convert_tramp_reference_op, n);
2989	  walk_function (convert_gimple_call, NULL, n);
2990	  chain_count += DECL_STATIC_CHAIN (decl);
2991	}
2992
2993      FOR_EACH_NEST_INFO (n, root)
2994	if (n->thunk_p)
2995	  {
2996	    tree decl = n->context;
2997	    tree alias = cgraph_node::get (decl)->thunk.alias;
2998	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
2999	  }
3000    }
3001  while (chain_count != old_chain_count);
3002
3003  if (dump_file && (dump_flags & TDF_DETAILS))
3004    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
3005	     iter_count);
3006}
3007
3008struct nesting_copy_body_data
3009{
3010  copy_body_data cb;
3011  struct nesting_info *root;
3012};
3013
3014/* A helper subroutine for debug_var_chain type remapping.  */
3015
3016static tree
3017nesting_copy_decl (tree decl, copy_body_data *id)
3018{
3019  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3020  tree *slot = nid->root->var_map->get (decl);
3021
3022  if (slot)
3023    return (tree) *slot;
3024
3025  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3026    {
3027      tree new_decl = copy_decl_no_change (decl, id);
3028      DECL_ORIGINAL_TYPE (new_decl)
3029	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
3030      return new_decl;
3031    }
3032
3033  if (VAR_P (decl)
3034      || TREE_CODE (decl) == PARM_DECL
3035      || TREE_CODE (decl) == RESULT_DECL)
3036    return decl;
3037
3038  return copy_decl_no_change (decl, id);
3039}
3040
3041/* A helper function for remap_vla_decls.  See if *TP contains
3042   some remapped variables.  */
3043
3044static tree
3045contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3046{
3047  struct nesting_info *root = (struct nesting_info *) data;
3048  tree t = *tp;
3049
3050  if (DECL_P (t))
3051    {
3052      *walk_subtrees = 0;
3053      tree *slot = root->var_map->get (t);
3054
3055      if (slot)
3056	return *slot;
3057    }
3058  return NULL;
3059}
3060
3061/* Remap VLA decls in BLOCK and subblocks if remapped variables are
3062   involved.  */
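
/* Rough illustration: a VLA "char buf[n]" in the parent is usually
   represented by a decl whose DECL_VALUE_EXPR is "*buf.N" for some pointer
   replacement variable, with a variably modified type mentioning a saved
   bound.  If BUF.N or the bound was moved into FRAME, the value expression
   and the type are remapped here in terms of the frame-based debug decls,
   so the original VLA decl, kept only for debug info, still describes the
   right object.  */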
3063
3064static void
3065remap_vla_decls (tree block, struct nesting_info *root)
3066{
3067  tree var, subblock, val, type;
3068  struct nesting_copy_body_data id;
3069
3070  for (subblock = BLOCK_SUBBLOCKS (block);
3071       subblock;
3072       subblock = BLOCK_CHAIN (subblock))
3073    remap_vla_decls (subblock, root);
3074
3075  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3076    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3077      {
3078	val = DECL_VALUE_EXPR (var);
3079	type = TREE_TYPE (var);
3080
3081	if (!(TREE_CODE (val) == INDIRECT_REF
3082	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3083	      && variably_modified_type_p (type, NULL)))
3084	  continue;
3085
3086	if (root->var_map->get (TREE_OPERAND (val, 0))
3087	    || walk_tree (&type, contains_remapped_vars, root, NULL))
3088	  break;
3089      }
3090
3091  if (var == NULL_TREE)
3092    return;
3093
3094  memset (&id, 0, sizeof (id));
3095  id.cb.copy_decl = nesting_copy_decl;
3096  id.cb.decl_map = new hash_map<tree, tree>;
3097  id.root = root;
3098
3099  for (; var; var = DECL_CHAIN (var))
3100    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3101      {
3102	struct nesting_info *i;
3103	tree newt, context;
3104
3105	val = DECL_VALUE_EXPR (var);
3106	type = TREE_TYPE (var);
3107
3108	if (!(TREE_CODE (val) == INDIRECT_REF
3109	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3110	      && variably_modified_type_p (type, NULL)))
3111	  continue;
3112
3113	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
3114	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
3115	  continue;
3116
3117	context = decl_function_context (var);
3118	for (i = root; i; i = i->outer)
3119	  if (i->context == context)
3120	    break;
3121
3122	if (i == NULL)
3123	  continue;
3124
        /* Fully expand value expressions.  This avoids having debug
           variables that are referenced only from value expressions and
           that could therefore be swept away during GC.  */
3127        if (slot)
3128	  {
3129	    tree t = (tree) *slot;
3130	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
3131	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
3132	  }
3133
3134	id.cb.src_fn = i->context;
3135	id.cb.dst_fn = i->context;
3136	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3137
3138	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
3139	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3140	  {
3141	    newt = TREE_TYPE (newt);
3142	    type = TREE_TYPE (type);
3143	  }
3144	if (TYPE_NAME (newt)
3145	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3146	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3147	    && newt != type
3148	    && TYPE_NAME (newt) == TYPE_NAME (type))
3149	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3150
3151	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3152	if (val != DECL_VALUE_EXPR (var))
3153	  SET_DECL_VALUE_EXPR (var, val);
3154      }
3155
3156  delete id.cb.decl_map;
3157}
3158
3159/* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3160   involved.  */
3161
3162static void
3163fixup_vla_decls (tree block)
3164{
3165  for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3166    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3167      {
3168	tree val = DECL_VALUE_EXPR (var);
3169
3170	if (!(TREE_CODE (val) == INDIRECT_REF
3171	      && VAR_P (TREE_OPERAND (val, 0))
3172	      && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3173	  continue;
3174
        /* Fully expand value expressions.  This avoids having debug
           variables that are referenced only from value expressions and
           that could therefore be swept away during GC.  */
3177	val = build1 (INDIRECT_REF, TREE_TYPE (val),
3178		      DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3179	SET_DECL_VALUE_EXPR (var, val);
3180      }
3181
3182  for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3183    fixup_vla_decls (sub);
3184}
3185
3186/* Fold the MEM_REF *E.  */
3187bool
3188fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3189{
3190  tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3191  *ref_p = fold (*ref_p);
3192  return true;
3193}
3194
3195/* Given DECL, a nested function, build an initialization call for FIELD,
3196   the trampoline or descriptor for DECL, using FUNC as the function.  */
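
/* The call built here has the shape

     FUNC (&FRAME.FIELD, &DECL, &FRAME);

   for example __builtin_init_trampoline (&FRAME.t, &child, &FRAME) when
   FUNC is the trampoline initializer (field and function names are
   illustrative).  */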
3197
3198static gcall *
3199build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3200		      tree func)
3201{
3202  tree arg1, arg2, arg3, x;
3203
3204  gcc_assert (DECL_STATIC_CHAIN (decl));
3205  arg3 = build_addr (info->frame_decl);
3206
3207  arg2 = build_addr (decl);
3208
3209  x = build3 (COMPONENT_REF, TREE_TYPE (field),
3210	      info->frame_decl, field, NULL_TREE);
3211  arg1 = build_addr (x);
3212
3213  return gimple_build_call (func, 3, arg1, arg2, arg3);
3214}
3215
3216/* Do "everything else" to clean up or complete state collected by the various
3217   walking passes -- create a field to hold the frame base address, lay out the
3218   types and decls, generate code to initialize the frame decl, store critical
3219   expressions in the struct function for rtl to find.  */
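
/* Informally, the prologue assembled below for a parent function ends up
   looking something like

     FRAME.FRAME_BASE.PARENT = __builtin_dwarf_cfa (0);   (debug info only)
     FRAME.parm = parm;              (for each non-locally referenced parm)
     FRAME.CHAIN = CHAIN.n;          (if the parent is itself nested)
     __builtin_init_trampoline (&FRAME.t, &child, &FRAME);  (per trampoline)

   with the exact field and temporary names being illustrative.  */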
3220
3221static void
3222finalize_nesting_tree_1 (struct nesting_info *root)
3223{
3224  gimple_seq stmt_list = NULL;
3225  gimple *stmt;
3226  tree context = root->context;
3227  struct function *sf;
3228
3229  if (root->thunk_p)
3230    return;
3231
3232  /* If we created a non-local frame type or decl, we need to lay them
3233     out at this time.  */
3234  if (root->frame_type)
3235    {
3236      /* Debugging information needs to compute the frame base address of the
3237	 parent frame out of the static chain from the nested frame.
3238
3239	 The static chain is the address of the FRAME record, so one could
3240	 imagine it would be possible to compute the frame base address just
3241	 adding a constant offset to this address.  Unfortunately, this is not
3242	 possible: if the FRAME object has alignment constraints that are
3243	 stronger than the stack, then the offset between the frame base and
3244	 the FRAME object will be dynamic.
3245
3246	 What we do instead is to append a field to the FRAME object that holds
3247	 the frame base address: then debug info just has to fetch this
3248	 field.  */
3249
3250      /* Debugging information will refer to the CFA as the frame base
3251	 address: we will do the same here.  */
3252      const tree frame_addr_fndecl
3253        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3254
3255      /* Create a field in the FRAME record to hold the frame base address for
3256	 this stack frame.  Since it will be used only by the debugger, put it
3257	 at the end of the record in order not to shift all other offsets.  */
3258      tree fb_decl = make_node (FIELD_DECL);
3259
3260      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3261      TREE_TYPE (fb_decl) = ptr_type_node;
3262      TREE_ADDRESSABLE (fb_decl) = 1;
3263      DECL_CONTEXT (fb_decl) = root->frame_type;
3264      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3265						fb_decl);
3266
3267      /* In some cases the frame type will trigger the -Wpadded warning.
3268	 This is not helpful; suppress it. */
3269      int save_warn_padded = warn_padded;
3270      warn_padded = 0;
3271      layout_type (root->frame_type);
3272      warn_padded = save_warn_padded;
3273      layout_decl (root->frame_decl, 0);
3274
3275      /* Initialize the frame base address field.  If the builtin we need is
3276	 not available, set it to NULL so that debugging information does not
3277	 reference junk.  */
3278      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3279			    root->frame_decl, fb_decl, NULL_TREE);
3280      tree fb_tmp;
3281
3282      if (frame_addr_fndecl != NULL_TREE)
3283	{
3284	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3285						integer_zero_node);
3286	  gimple_stmt_iterator gsi = gsi_last (stmt_list);
3287
3288	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3289	}
3290      else
3291	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3292      gimple_seq_add_stmt (&stmt_list,
3293			   gimple_build_assign (fb_ref, fb_tmp));
3294
3295      declare_vars (root->frame_decl,
3296		    gimple_seq_first_stmt (gimple_body (context)), true);
3297    }
3298
3299  /* If any parameters were referenced non-locally, then we need to insert
3300     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register, the stmt is not valid
	     GIMPLE.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
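  /* Roughly, the statement built below is

       FRAME.foo.<chain-field> = CHAIN.n;

     where CHAIN.n stands for the incoming static chain parameter (again, the
     names are purely illustrative).  */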
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
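  /* Each initialization built below is, schematically,

       __builtin_init_trampoline (&FRAME.foo.TRAMP.bar, bar, &FRAME.foo);

     i.e. the trampoline buffer living in the frame is stamped with the
     nested function's address and its static chain (field and frame names
     here are illustrative only).  */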
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
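  /* The descriptor case is analogous, except that it fills in a function
     descriptor object rather than an executable trampoline, roughly

       __builtin_init_descriptor (&FRAME.foo.DESCR.bar, bar, &FRAME.foo);

     (names illustrative, as above).  */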
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them at the start of
     the outermost bind, ahead of the original function body.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
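      /* For instance, a debug decl for a VLA such as 'char buf[n]' may have
	 an array type whose bound still refers to the original 'n'; the
	 remapping below makes it refer to the corresponding debug_var_chain
	 variable instead (example purely illustrative).  */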
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}

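/* Run finalize_nesting_tree_1 on every function in the nesting tree rooted
   at ROOT.  */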
static void
finalize_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    finalize_nesting_tree_1 (n);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node::get (root->context);

  /* For nested functions, update the cgraph to reflect unnesting.
     We have also delayed finalizing these functions until this point.  */
  if (node->origin)
    {
      node->unnest ();
      if (!root->thunk_p)
	cgraph_node::finalize_function (root->context, true);
    }
}

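/* Run unnest_nesting_tree_1 on every function in the nesting tree rooted
   at ROOT.  */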
static void
unnest_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    unnest_nesting_tree_1 (n);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *node, *next;

  node = iter_nestinfo_start (root);
  do
    {
      next = iter_nestinfo_next (node);
      delete node->var_map;
      delete node->field_map;
      delete node->mem_refs;
      free (node);
      node = next;
    }
  while (node);
}

/* Gimplify a function and all its nested functions.  */
static void
gimplify_all_functions (struct cgraph_node *root)
{
  struct cgraph_node *iter;
  if (!gimple_body (root->decl))
    gimplify_function_tree (root->decl);
  for (iter = root->nested; iter; iter = iter->next_nested)
    if (!iter->thunk.thunk_p)
      gimplify_all_functions (iter);
}

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}

#include "gt-tree-nested.h"