1/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2   tree representation into the GIMPLE form.
3   Copyright (C) 2002-2015 Free Software Foundation, Inc.
4   Major work done by Sebastian Pop <s.pop@laposte.net>,
5   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3.  If not see
21<http://www.gnu.org/licenses/>.  */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "options.h"
34#include "wide-int.h"
35#include "inchash.h"
36#include "tree.h"
37#include "fold-const.h"
38#include "hashtab.h"
39#include "tm.h"
40#include "hard-reg-set.h"
41#include "function.h"
42#include "rtl.h"
43#include "flags.h"
44#include "statistics.h"
45#include "real.h"
46#include "fixed-value.h"
47#include "insn-config.h"
48#include "expmed.h"
49#include "dojump.h"
50#include "explow.h"
51#include "calls.h"
52#include "emit-rtl.h"
53#include "varasm.h"
54#include "stmt.h"
55#include "expr.h"
56#include "predict.h"
57#include "basic-block.h"
58#include "tree-ssa-alias.h"
59#include "internal-fn.h"
60#include "gimple-fold.h"
61#include "tree-eh.h"
62#include "gimple-expr.h"
63#include "is-a.h"
64#include "gimple.h"
65#include "gimplify.h"
66#include "gimple-iterator.h"
67#include "stringpool.h"
68#include "stor-layout.h"
69#include "print-tree.h"
70#include "tree-iterator.h"
71#include "tree-inline.h"
72#include "tree-pretty-print.h"
73#include "langhooks.h"
74#include "bitmap.h"
75#include "gimple-ssa.h"
76#include "hash-map.h"
77#include "plugin-api.h"
78#include "ipa-ref.h"
79#include "cgraph.h"
80#include "tree-cfg.h"
81#include "tree-ssanames.h"
82#include "tree-ssa.h"
83#include "diagnostic-core.h"
84#include "target.h"
85#include "splay-tree.h"
86#include "omp-low.h"
87#include "gimple-low.h"
88#include "cilk.h"
89#include "gomp-constants.h"
90
91#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
92#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
93#include "builtins.h"
94
/* Per-variable data-sharing flags recorded in a gimplify_omp_ctx's
   VARIABLES splay tree; values are powers of two and are ORed together.  */

enum gimplify_omp_var_data
{
  /* The variable has been referenced inside the region.  */
  GOVD_SEEN = 1,
  /* The variable appeared on an explicit clause.  */
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  /* The variable is local to the region (e.g. declared inside it).  */
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  /* Flag for GOVD_PRIVATE: the outer reference is needed.  */
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  /* Mask of the mutually-exclusive data-sharing classes above.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
121
122
/* Kind of OMP region being gimplified.  Note the encoding: the low bit
   marks a "combined" construct (ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
   ORT_COMBINED_TEAMS = ORT_TEAMS | 1) and ORT_UNTIED_TASK = ORT_TASK | 1,
   so region kinds are compared both by equality and by bit tests.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_COMBINED_TEAMS = 9,
  /* Data region.  */
  ORT_TARGET_DATA = 16,
  /* Data region with offloading.  */
  ORT_TARGET = 32
};
138
/* Gimplify hashtable helper.  Describes how the formal-temporary table
   (gimplify_ctx::temp_htab) hashes and compares elt_t entries; entries
   are plain malloc'd records, hence typed_free_remove.  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  /* Hash an entry by its VAL tree.  Defined later in this file.  */
  static inline hashval_t hash (const value_type *);
  /* Compare two entries for equality of their VAL trees.  */
  static inline bool equal (const value_type *, const compare_type *);
};
148
/* State for gimplifying one function; contexts nest via PREV_CONTEXT
   (see push_gimplify_context / pop_gimplify_context).  */

struct gimplify_ctx
{
  /* Enclosing context, or the pool free-list link when recycled.  */
  struct gimplify_ctx *prev_context;

  /* Stack of GIMPLE_BINDs currently open during gimplification.  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporary VAR_DECLs created so far (newest first).  */
  tree temps;
  /* Cleanups seen while inside a conditional context.  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  /* Labels of the switch statement currently being gimplified.  */
  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Depth of nested COND_EXPRs; > 0 means we are in conditional context.  */
  int conditions;
  bool save_stack;
  bool into_ssa;
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};
169
/* State for one OMP region being gimplified; regions nest via
   OUTER_CONTEXT.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  /* Map from DECL to gimplify_omp_var_data flags, keyed by DECL_UID.  */
  splay_tree variables;
  /* Types already privatized in this region, to avoid repeating work.  */
  hash_set<tree> *privatized_types;
  /* Location of the construct, for diagnostics.  */
  location_t location;
  /* Effective default(...) clause for this region.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
};
181
182static struct gimplify_ctx *gimplify_ctxp;
183static struct gimplify_omp_ctx *gimplify_omp_ctxp;
184
185/* Forward declaration.  */
186static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
187
/* Append statement GS to sequence *SEQ_P without updating operand
   caches; shorter alias for gimple_seq_add_stmt_without_update, for
   use in gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
196
197/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
198   NULL, a new sequence is allocated.   This function is
199   similar to gimple_seq_add_seq, but does not scan the operands.
200   During gimplification, we need to manipulate statement sequences
201   before the def/use vectors have been constructed.  */
202
203static void
204gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
205{
206  gimple_stmt_iterator si;
207
208  if (src == NULL)
209    return;
210
211  si = gsi_last (*dst_p);
212  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
213}
214
215
216/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
217   and popping gimplify contexts.  */
218
219static struct gimplify_ctx *ctx_pool = NULL;
220
221/* Return a gimplify context struct from the pool.  */
222
223static inline struct gimplify_ctx *
224ctx_alloc (void)
225{
226  struct gimplify_ctx * c = ctx_pool;
227
228  if (c)
229    ctx_pool = c->prev_context;
230  else
231    c = XNEW (struct gimplify_ctx);
232
233  memset (c, '\0', sizeof (*c));
234  return c;
235}
236
/* Put gimplify context C back into the pool by pushing it onto the
   free list; the memory is only released by free_gimplify_stack.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  /* PREV_CONTEXT doubles as the free-list link.  */
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
245
246/* Free allocated ctx stack memory.  */
247
248void
249free_gimplify_stack (void)
250{
251  struct gimplify_ctx *c;
252
253  while ((c = ctx_pool))
254    {
255      ctx_pool = c->prev_context;
256      free (c);
257    }
258}
259
260
261/* Set up a context for the gimplifier.  */
262
263void
264push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
265{
266  struct gimplify_ctx *c = ctx_alloc ();
267
268  c->prev_context = gimplify_ctxp;
269  gimplify_ctxp = c;
270  gimplify_ctxp->into_ssa = in_ssa;
271  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
272}
273
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* Every GIMPLE_BIND pushed in this context must have been popped.  */
  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries either to BODY's bind or to the
     function's local declarations.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* The formal-temporary table is per-context; discard it before the
     context struct goes back to the pool.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
300
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a few slots to limit reallocation churn.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
309
/* Pop the topmost element off the stack of bindings; the stack must be
   non-empty.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
317
/* Return the innermost (most recently pushed) binding on the stack;
   the stack must be non-empty.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
325
/* Return the stack of bindings created during gimplification.  Note the
   vec is returned by value, but it shares storage with the context's
   stack.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
333
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  /* CONDITIONS counts enclosing COND_EXPRs (see gimple_push_condition).  */
  return gimplify_ctxp->conditions > 0;
}
342
/* Note that we've entered a COND_EXPR by bumping the nesting counter.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional, no conditional cleanups may
     be pending — they are flushed by gimple_pop_condition.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
354
355/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
356   now, add any conditional cleanups we've seen to the prequeue.  */
357
358static void
359gimple_pop_condition (gimple_seq *pre_p)
360{
361  int conds = --(gimplify_ctxp->conditions);
362
363  gcc_assert (conds >= 0);
364  if (conds == 0)
365    {
366      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
367      gimplify_ctxp->conditional_cleanups = NULL;
368    }
369}
370
371/* A stable comparison routine for use with splay trees and DECLs.  */
372
373static int
374splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
375{
376  tree a = (tree) xa;
377  tree b = (tree) xb;
378
379  return DECL_UID (a) - DECL_UID (b);
380}
381
382/* Create a new omp construct that deals with variable remapping.  */
383
384static struct gimplify_omp_ctx *
385new_omp_context (enum omp_region_type region_type)
386{
387  struct gimplify_omp_ctx *c;
388
389  c = XCNEW (struct gimplify_omp_ctx);
390  c->outer_context = gimplify_omp_ctxp;
391  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
392  c->privatized_types = new hash_set<tree>;
393  c->location = input_location;
394  c->region_type = region_type;
395  if ((region_type & ORT_TASK) == 0)
396    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
397  else
398    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
399
400  return c;
401}
402
/* Destroy an omp construct context C created by new_omp_context,
   releasing its variable map and privatized-type set.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  XDELETE (c);
}
412
413static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
414static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
415
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference (any replacement tree written back into the local copy is
   discarded).  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
425
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before gimplification, so we can
     find the first statement that gimplification appends.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty before; the first new statement is
	 the one right after the old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty before, so any new statement is first.
       (Returns NULL if nothing was generated.)  */
    return gimple_seq_first_stmt (*seq_p);
}
445
446/* Returns true iff T is a valid RHS for an assignment to an un-renamed
447   LHS, or for a call argument.  */
448
449static bool
450is_gimple_mem_rhs (tree t)
451{
452  /* If we're dealing with a renamable type, either source or dest must be
453     a renamed variable.  */
454  if (is_gimple_reg_type (TREE_TYPE (t)))
455    return is_gimple_val (t);
456  else
457    return is_gimple_val (t) || is_gimple_lvalue (t);
458}
459
/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  /* Any tree code with a valid GIMPLE rhs class is assignable; CALL_EXPR
     is accepted in addition even though its rhs class is invalid.  */
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}
471
472/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
473   this predicate should only be used during gimplification.  See the
474   rationale for this in gimplify_modify_expr.  */
475
476static bool
477is_gimple_mem_rhs_or_call (tree t)
478{
479  /* If we're dealing with a renamable type, either source or dest must be
480     a renamed variable.  */
481  if (is_gimple_reg_type (TREE_TYPE (t)))
482    return is_gimple_val (t);
483  else
484    return (is_gimple_val (t) || is_gimple_lvalue (t)
485	    || TREE_CODE (t) == CALL_EXPR);
486}
487
488/* Create a temporary with a name derived from VAL.  Subroutine of
489   lookup_tmp_var; nobody else should call this function.  */
490
491static inline tree
492create_tmp_from_val (tree val)
493{
494  /* Drop all qualifiers and address-space information from the value type.  */
495  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
496  tree var = create_tmp_var (type, get_name (val));
497  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
498      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
499    DECL_GIMPLE_REG_P (var) = 1;
500  return var;
501}
502
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Probe the formal-temporary table with VAL as the key,
	 creating the table lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: make a temporary and cache it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary already created for an equal VAL.  */
	  elt_p = *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
543
/* Helper for get_formal_tmp_var and get_initialized_tmp_var: gimplify
   VAL, store it in a (possibly reused, if IS_FORMAL) temporary whose
   initialization is appended to *PRE_P, and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  /* In SSA mode, register-typed values get a fresh SSA name instead of
     a VAR_DECL temporary.  */
  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
573
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* IS_FORMAL is true: the temporary may be shared with other uses of
     an equal VAL (see lookup_tmp_var).  No post-queue is supplied.  */
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
591
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never shared.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
600
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      /* GS must be a GIMPLE_BIND; the checked cast enforces this.  */
      gbind *scope = as_a <gbind *> (gs);

      /* Temporaries are accumulated by prepending (see gimple_add_tmp_var),
	 so reverse the chain to restore creation order.  After this,
	 TEMPS is the new head and LAST is the new tail.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted: just splice the reversed chain in
	     front of the bind's existing variable list.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
640
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no bound could be computed.  */
  gcc_assert (max_size >= 0);

  /* Overwrite both the byte-size and the bit-size with the bound.  */
  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
664
/* Push the temporary variable TMP into the scope of function FN,
   recording it in FN's local declarations.  TMP must not already be
   chained into any binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
683
/* Push the temporary variable TMP into the current binding.  TMP must
   not already be chained into any binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Inside gimplification: prepend to the context's temp chain; the
	 chain is reversed later by declare_vars.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  /* Skip workshare/simd regions; those don't own variables.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
729
730
731
732/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
733   nodes that are referenced more than once in GENERIC functions.  This is
734   necessary because gimplification (translation into GIMPLE) is performed
735   by modifying tree nodes in-place, so gimplication of a shared node in a
736   first context could generate an invalid GIMPLE form in a second context.
737
738   This is achieved with a simple mark/copy/unmark algorithm that walks the
739   GENERIC representation top-down, marks nodes with TREE_VISITED the first
740   time it encounters them, duplicates them if they already have TREE_VISITED
741   set, and finally removes the TREE_VISITED marks it has set.
742
743   The algorithm works only at the function level, i.e. it generates a GENERIC
744   representation of a function with no nodes shared within the function when
745   passed a GENERIC function (except for nodes that are allowed to be shared).
746
747   At the global level, it is also necessary to unshare tree nodes that are
748   referenced in more than one function, for the same aforementioned reason.
749   This requires some cooperation from the front-end.  There are 2 strategies:
750
751     1. Manual unsharing.  The front-end needs to call unshare_expr on every
752        expression that might end up being shared across functions.
753
754     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
755        of calling unshare_expr on expressions that might be shared across
756        functions, the front-end pre-marks them with TREE_VISITED.  This will
757        ensure that they are unshared on the first reference within functions
758        when the regular unsharing algorithm runs.  The counterpart is that
759        this algorithm must look deeper than for manual unsharing, which is
760        specified by LANG_HOOKS_DEEP_UNSHARING.
761
762  If there are only few specific cases of node sharing across functions, it is
763  probably easier for a front-end to unshare the expressions manually.  On the
764  contrary, if the expressions generated at the global level are as widespread
765  as expressions generated within functions, deep unsharing is very likely the
766  way to go.  */
767
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns false on first insertion, so the subtrees
	 of each such node are walked at most once.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
811
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* mostly_copy_tree_r replaces *TP with a deep copy (sharing-safe
	 nodes excepted), so the walk must not descend again here.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
850
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  Leaves TREE_VISITED marks set;
   they are cleared afterwards by unmark_visited.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
859
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the saved body plus the size expressions of the result decl,
     which may also reference shared trees.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions, if any are recorded in the cgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
882
883/* Callback for walk_tree to unmark the visited trees rooted at *TP.
884   Subtrees are walked until the first unvisited node is encountered.  */
885
886static tree
887unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
888{
889  tree t = *tp;
890
891  /* If this node has been visited, unmark it and keep looking.  */
892  if (TREE_VISITED (t))
893    TREE_VISITED (t) = 0;
894
895  /* Otherwise, don't look any deeper.  */
896  else
897    *walk_subtrees = 0;
898
899  return NULL_TREE;
900}
901
/* Unmark the visited trees rooted at *TP, undoing the marks left by
   copy_if_shared.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
909
/* Likewise, but mark all trees as not visited: clear TREE_VISITED
   throughout the body of FNDECL (and its result-decl size expressions),
   recursing into nested functions.  Counterpart of unshare_body.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
925
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  /* walk_tree rewrites EXPR in place via the pointer; return the
     (possibly replaced) root.  */
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
936
937/* Worker for unshare_expr_without_location.  */
938
939static tree
940prune_expr_location (tree *tp, int *walk_subtrees, void *)
941{
942  if (EXPR_P (*tp))
943    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
944  else
945    *walk_subtrees = 0;
946  return NULL_TREE;
947}
948
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expression nodes carry locations; skip the second walk for
     decls, constants, etc.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
960
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR/MODIFY_EXPR whose RHS slot will
   receive the wrapper's value expression.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper traversed is
	 voided and marked as having side effects on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      /* P now points at the innermost value-producing expression (or is
	 NULL/empty if there is no value to capture).  */
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: invent a "retval" temporary and
	     initialize it with the innermost value in place.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1057
1058/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1059   a temporary through which they communicate.  */
1060
1061static void
1062build_stack_save_restore (gcall **save, gcall **restore)
1063{
1064  tree tmp_var;
1065
1066  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1067  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1068  gimple_call_set_lhs (*save, tmp_var);
1069
1070  *restore
1071    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1072			 1, tmp_var);
1073}
1074
/* Gimplify a BIND_EXPR into a GIMPLE_BIND tuple appended to PRE_P.
   The wrapper is voidified first (its value, if any, is redirected
   into a temporary), the declared variables are registered with the
   gimplifier and any enclosing OMP context, the body is gimplified,
   and cleanup actions (stack restore, variable clobbers) are wrapped
   around the body in a GIMPLE_TRY_FINALLY when needed.

   EXPR_P points to the BIND_EXPR; on exit it holds the replacement
   value (the temporary produced by voidify_wrapper_expr) or NULL_TREE.
   PRE_P is the sequence the GIMPLE_BIND is appended to.

   Returns GS_OK if a value-producing temporary was substituted,
   GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local in the innermost OMP context, unless
	     it was already seen in a bind expr and is known there.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      /* In a simd region an addressable non-static local must be
		 privatized rather than merely marked local.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  /* An explicit local register variable forces conservative
	     treatment elsewhere; record its presence on the function.  */
	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset save_stack so we can detect whether anything inside this
     body (e.g. a VLA) requested a stack save/restore pair.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* A volatile empty CONSTRUCTOR on the RHS marks the assignment
	     as a clobber, telling later passes the storage is dead.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a TRY_FINALLY so the cleanup runs on every
	 exit path, with the stack save (if any) placed before it.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
	  		     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* Restore the outer context's save_stack state.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1220
1221/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1222   GIMPLE value, it is assigned to a new temporary and the statement is
1223   re-written to return the temporary.
1224
1225   PRE_P points to the sequence where side effects that must happen before
1226   STMT should be stored.  */
1227
1228static enum gimplify_status
1229gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1230{
1231  greturn *ret;
1232  tree ret_expr = TREE_OPERAND (stmt, 0);
1233  tree result_decl, result;
1234
1235  if (ret_expr == error_mark_node)
1236    return GS_ERROR;
1237
1238  /* Implicit _Cilk_sync must be inserted right before any return statement
1239     if there is a _Cilk_spawn in the function.  If the user has provided a
1240     _Cilk_sync, the optimizer should remove this duplicate one.  */
1241  if (fn_contains_cilk_spawn_p (cfun))
1242    {
1243      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1244      gimplify_and_add (impl_sync, pre_p);
1245    }
1246
1247  if (!ret_expr
1248      || TREE_CODE (ret_expr) == RESULT_DECL
1249      || ret_expr == error_mark_node)
1250    {
1251      greturn *ret = gimple_build_return (ret_expr);
1252      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1253      gimplify_seq_add_stmt (pre_p, ret);
1254      return GS_ALL_DONE;
1255    }
1256
1257  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1258    result_decl = NULL_TREE;
1259  else
1260    {
1261      result_decl = TREE_OPERAND (ret_expr, 0);
1262
1263      /* See through a return by reference.  */
1264      if (TREE_CODE (result_decl) == INDIRECT_REF)
1265	result_decl = TREE_OPERAND (result_decl, 0);
1266
1267      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1268		   || TREE_CODE (ret_expr) == INIT_EXPR)
1269		  && TREE_CODE (result_decl) == RESULT_DECL);
1270    }
1271
1272  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1273     Recall that aggregate_value_p is FALSE for any aggregate type that is
1274     returned in registers.  If we're returning values in registers, then
1275     we don't want to extend the lifetime of the RESULT_DECL, particularly
1276     across another call.  In addition, for those aggregates for which
1277     hard_function_value generates a PARALLEL, we'll die during normal
1278     expansion of structure assignments; there's special code in expand_return
1279     to handle this case that does not exist in expand_expr.  */
1280  if (!result_decl)
1281    result = NULL_TREE;
1282  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1283    {
1284      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1285	{
1286	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1287	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1288	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1289	     should be effectively allocated by the caller, i.e. all calls to
1290	     this function must be subject to the Return Slot Optimization.  */
1291	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1292	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1293	}
1294      result = result_decl;
1295    }
1296  else if (gimplify_ctxp->return_temp)
1297    result = gimplify_ctxp->return_temp;
1298  else
1299    {
1300      result = create_tmp_reg (TREE_TYPE (result_decl));
1301
1302      /* ??? With complex control flow (usually involving abnormal edges),
1303	 we can wind up warning about an uninitialized value for this.  Due
1304	 to how this variable is constructed and initialized, this is never
1305	 true.  Give up and never warn.  */
1306      TREE_NO_WARNING (result) = 1;
1307
1308      gimplify_ctxp->return_temp = result;
1309    }
1310
1311  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1312     Then gimplify the whole thing.  */
1313  if (result != result_decl)
1314    TREE_OPERAND (ret_expr, 0) = result;
1315
1316  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1317
1318  ret = gimple_build_return (result);
1319  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1320  gimplify_seq_add_stmt (pre_p, ret);
1321
1322  return GS_ALL_DONE;
1323}
1324
/* Gimplify a variable-length array DECL: simplify its size expressions,
   rewrite all uses of DECL as an indirection through a pointer
   temporary, and emit an alloca-with-align call that fills in that
   pointer.  Side-effect statements go onto SEQ_P.  Also flags the
   enclosing gimplify context so the containing BIND_EXPR will emit
   stack save/restore calls.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The indirection through ADDR can never trap; ADDR always points at
     the alloca'd storage created below.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Emit: addr = (ptr_type) __builtin_alloca_with_align (size, align).  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1368
1369/* A helper function to be called via walk_tree.  Mark all labels under *TP
1370   as being forced.  To be called for DECL_INITIAL of static variables.  */
1371
1372static tree
1373force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1374{
1375  if (TYPE_P (*tp))
1376    *walk_subtrees = 0;
1377  if (TREE_CODE (*tp) == LABEL_DECL)
1378    FORCED_LABEL (*tp) = 1;
1379
1380  return NULL_TREE;
1381}
1382
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.

   STMT_P points to the DECL_EXPR; it is cleared (the statement is
   consumed) and replacement statements are emitted onto SEQ_P.

   Returns GS_ALL_DONE, or GS_ERROR if the declared type is erroneous.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any variable size expressions in the decl's type first.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variable-sized decls, and (under generic stack checking) large
	 non-static decls, get deferred alloca-style allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the node is then dead and can be freed immediately.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1446
1447/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1448   and replacing the LOOP_EXPR with goto, but if the loop contains an
1449   EXIT_EXPR, we need to append a label for it to jump to.  */
1450
1451static enum gimplify_status
1452gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1453{
1454  tree saved_label = gimplify_ctxp->exit_label;
1455  tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1456
1457  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1458
1459  gimplify_ctxp->exit_label = NULL_TREE;
1460
1461  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1462
1463  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1464
1465  if (gimplify_ctxp->exit_label)
1466    gimplify_seq_add_stmt (pre_p,
1467			   gimple_build_label (gimplify_ctxp->exit_label));
1468
1469  gimplify_ctxp->exit_label = saved_label;
1470
1471  *expr_p = NULL;
1472  return GS_ALL_DONE;
1473}
1474
1475/* Gimplify a statement list onto a sequence.  These may be created either
1476   by an enlightened front-end, or by shortcut_cond_expr.  */
1477
1478static enum gimplify_status
1479gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1480{
1481  tree temp = voidify_wrapper_expr (*expr_p, NULL);
1482
1483  tree_stmt_iterator i = tsi_start (*expr_p);
1484
1485  while (!tsi_end_p (i))
1486    {
1487      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1488      tsi_delink (&i);
1489    }
1490
1491  if (temp)
1492    {
1493      *expr_p = temp;
1494      return GS_OK;
1495    }
1496
1497  return GS_ALL_DONE;
1498}
1499
1500
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.

   EXPR_P points to the SWITCH_EXPR.  The gimplified GIMPLE_SWITCH and
   its body are appended to PRE_P.

   Returns GS_ALL_DONE on success, or the failing status of gimplifying
   the switch condition.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Gimplifying the body pushes each CASE_LABEL_EXPR it meets onto
	 gimplify_ctxp->case_labels (see gimplify_case_label_expr).  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Sort the labels, prune unreachable ones and extract the default.  */
      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  glabel *new_default;

	  /* No user-written default: synthesize one that falls through
	     past the body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1564
1565/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1566
1567static enum gimplify_status
1568gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1569{
1570  struct gimplify_ctx *ctxp;
1571  glabel *label_stmt;
1572
1573  /* Invalid programs can play Duff's Device type games with, for example,
1574     #pragma omp parallel.  At least in the C front end, we don't
1575     detect such invalid branches until after gimplification, in the
1576     diagnose_omp_blocks pass.  */
1577  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1578    if (ctxp->case_labels.exists ())
1579      break;
1580
1581  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1582  ctxp->case_labels.safe_push (*expr_p);
1583  gimplify_seq_add_stmt (pre_p, label_stmt);
1584
1585  return GS_ALL_DONE;
1586}
1587
1588/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1589   if necessary.  */
1590
1591tree
1592build_and_jump (tree *label_p)
1593{
1594  if (label_p == NULL)
1595    /* If there's nowhere to jump, just fall through.  */
1596    return NULL_TREE;
1597
1598  if (*label_p == NULL_TREE)
1599    {
1600      tree label = create_artificial_label (UNKNOWN_LOCATION);
1601      *label_p = label;
1602    }
1603
1604  return build1 (GOTO_EXPR, void_type_node, *label_p);
1605}
1606
1607/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1608   This also involves building a label to jump to and communicating it to
1609   gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1610
1611static enum gimplify_status
1612gimplify_exit_expr (tree *expr_p)
1613{
1614  tree cond = TREE_OPERAND (*expr_p, 0);
1615  tree expr;
1616
1617  expr = build_and_jump (&gimplify_ctxp->exit_label);
1618  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1619  *expr_p = expr;
1620
1621  return GS_OK;
1622}
1623
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types let get_unwidened pick the narrowest mode the
     (possibly bit-field) access can be read in; otherwise the field's
     declared type is canonical.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1674
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     &array[low_bound] has the element pointer type directly.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1728
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; *EXPR_P is rewritten
   in place.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
1775
/* Nonlocal VLAs seen in the current function; used to create the debug
   decl for each such VLA at most once (see gimplify_var_or_parm_decl).  */
static hash_set<tree> *nonlocal_vlas;

/* Chain of the VAR_DECLs created for nonlocal VLAs for debug info
   purposes, linked through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
1781
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns
   GS_ALL_DONE if nothing was substituted, GS_ERROR for a leaked
   erroneous local declaration.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip workshare/simd contexts to find the relevant enclosing
	     OMP region, if any.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns false the first time DECL is seen, so
	     the debug copy is created only once per VLA.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the decl with an unshared copy of its value expression.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1848
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T from its
   tree code and its operands' flags.  Assignment-like and
   increment/decrement expressions keep the flag unconditionally; for
   ordinary expressions the flag is volatile-or-any-operand-has-effects;
   constants never have side effects.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:        /* a function call */
      /* Start from the node's own volatility, then OR in each operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
   }
}
1901
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.

   FALLBACK describes what kind of GIMPLE value the caller can accept for
     the overall expression (rvalue, lvalue, or either).

   Returns GS_ALL_DONE if nothing was changed, otherwise the minimum of
   the statuses of the individual sub-gimplifications performed.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  /* *EXPR_P is one of the reference codes handled above, so at least one
     component must have been pushed.  */
  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      /* Operand 2 caches the (unshared) low bound; only store and
		 gimplify it when it is not already a gimple invariant.  */
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, the returned status must still be GS_ALL_DONE.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2104
2105/*  Gimplify the self modifying expression pointed to by EXPR_P
2106    (++, --, +=, -=).
2107
2108    PRE_P points to the list where side effects that must happen before
2109	*EXPR_P should be stored.
2110
2111    POST_P points to the list where side effects that must happen after
2112	*EXPR_P should be stored.
2113
2114    WANT_VALUE is nonzero iff we want to use the value of this expression
2115	in another expression.
2116
2117    ARITH_TYPE is the type the computation should be performed in.  */
2118
2119enum gimplify_status
2120gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2121			bool want_value, tree arith_type)
2122{
2123  enum tree_code code;
2124  tree lhs, lvalue, rhs, t1;
2125  gimple_seq post = NULL, *orig_post_p = post_p;
2126  bool postfix;
2127  enum tree_code arith_code;
2128  enum gimplify_status ret;
2129  location_t loc = EXPR_LOCATION (*expr_p);
2130
2131  code = TREE_CODE (*expr_p);
2132
2133  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2134	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2135
2136  /* Prefix or postfix?  */
2137  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2138    /* Faster to treat as prefix if result is not used.  */
2139    postfix = want_value;
2140  else
2141    postfix = false;
2142
2143  /* For postfix, make sure the inner expression's post side effects
2144     are executed after side effects from this expression.  */
2145  if (postfix)
2146    post_p = &post;
2147
2148  /* Add or subtract?  */
2149  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2150    arith_code = PLUS_EXPR;
2151  else
2152    arith_code = MINUS_EXPR;
2153
2154  /* Gimplify the LHS into a GIMPLE lvalue.  */
2155  lvalue = TREE_OPERAND (*expr_p, 0);
2156  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2157  if (ret == GS_ERROR)
2158    return ret;
2159
2160  /* Extract the operands to the arithmetic operation.  */
2161  lhs = lvalue;
2162  rhs = TREE_OPERAND (*expr_p, 1);
2163
2164  /* For postfix operator, we evaluate the LHS to an rvalue and then use
2165     that as the result value and in the postqueue operation.  */
2166  if (postfix)
2167    {
2168      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2169      if (ret == GS_ERROR)
2170	return ret;
2171
2172      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2173    }
2174
2175  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
2176  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2177    {
2178      rhs = convert_to_ptrofftype_loc (loc, rhs);
2179      if (arith_code == MINUS_EXPR)
2180	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2181      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2182    }
2183  else
2184    t1 = fold_convert (TREE_TYPE (*expr_p),
2185		       fold_build2 (arith_code, arith_type,
2186				    fold_convert (arith_type, lhs),
2187				    fold_convert (arith_type, rhs)));
2188
2189  if (postfix)
2190    {
2191      gimplify_assign (lvalue, t1, pre_p);
2192      gimplify_seq_add_seq (orig_post_p, post);
2193      *expr_p = lhs;
2194      return GS_ALL_DONE;
2195    }
2196  else
2197    {
2198      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2199      return GS_OK;
2200    }
2201}
2202
2203/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2204
2205static void
2206maybe_with_size_expr (tree *expr_p)
2207{
2208  tree expr = *expr_p;
2209  tree type = TREE_TYPE (expr);
2210  tree size;
2211
2212  /* If we've already wrapped this or the type is error_mark_node, we can't do
2213     anything.  */
2214  if (TREE_CODE (expr) == WITH_SIZE_EXPR
2215      || type == error_mark_node)
2216    return;
2217
2218  /* If the size isn't known or is a constant, we have nothing to do.  */
2219  size = TYPE_SIZE_UNIT (type);
2220  if (!size || TREE_CODE (size) == INTEGER_CST)
2221    return;
2222
2223  /* Otherwise, make a WITH_SIZE_EXPR.  */
2224  size = unshare_expr (size);
2225  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2226  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2227}
2228
2229/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
2230   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2231   the CALL_EXPR.  */
2232
2233enum gimplify_status
2234gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2235{
2236  bool (*test) (tree);
2237  fallback_t fb;
2238
2239  /* In general, we allow lvalues for function arguments to avoid
2240     extra overhead of copying large aggregates out of even larger
2241     aggregates into temporaries only to copy the temporaries to
2242     the argument list.  Make optimizers happy by pulling out to
2243     temporaries those types that fit in registers.  */
2244  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2245    test = is_gimple_val, fb = fb_rvalue;
2246  else
2247    {
2248      test = is_gimple_lvalue, fb = fb_either;
2249      /* Also strip a TARGET_EXPR that would force an extra copy.  */
2250      if (TREE_CODE (*arg_p) == TARGET_EXPR)
2251	{
2252	  tree init = TARGET_EXPR_INITIAL (*arg_p);
2253	  if (init
2254	      && !VOID_TYPE_P (TREE_TYPE (init)))
2255	    *arg_p = init;
2256	}
2257    }
2258
2259  /* If this is a variable sized type, we must remember the size.  */
2260  maybe_with_size_expr (arg_p);
2261
2262  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2263  /* Make sure arguments have the same location as the function call
2264     itself.  */
2265  protected_set_expr_location (*arg_p, call_location);
2266
2267  /* There is a sequence point before a function call.  Side effects in
2268     the argument list must occur before the actual call. So, when
2269     gimplifying arguments, force gimplify_expr to use an internal
2270     post queue which is then appended to the end of PRE_P.  */
2271  return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2272}
2273
2274/* Don't fold inside offloading or taskreg regions: it can break code by
2275   adding decl references that weren't in the source.  We'll do it during
2276   omplower pass instead.  */
2277
2278static bool
2279maybe_fold_stmt (gimple_stmt_iterator *gsi)
2280{
2281  struct gimplify_omp_ctx *ctx;
2282  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2283    if (ctx->region_type == ORT_TARGET
2284	|| (ctx->region_type & (ORT_PARALLEL | ORT_TASK)) != 0)
2285      return false;
2286  return fold_stmt (gsi);
2287}
2288
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  On success,
   either emits a GIMPLE_CALL into PRE_P and clears *EXPR_P (when the
   value is unneeded), or leaves *EXPR_P as a gimplified CALL_EXPR for
   the caller; returns GS_OK when *EXPR_P was replaced and must be
   re-gimplified, GS_ERROR on failure.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      /* When the value is wanted, leave *EXPR_P untouched.  */
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      /* NB: this declaration intentionally shadows the gcall *CALL above;
	 internal calls are built directly as a plain gimple stmt.  */
      gimple call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
        {
	  /* Remember it so the argument loop below skips gimplifying
	     the second argument, which must stay a plain PARM_DECL.  */
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Fold __builtin_LINE () to the call's source line number.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Fold __builtin_FILE () to the call's source file name.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Fold __builtin_FUNCTION () to the enclosing function name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P one step per argument; if P runs out first, the remaining
     arguments are beyond the named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the call without the trailing va_arg_pack argument.  */
	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Gimplify the function arguments.  Walk them last-to-first when the
     target pushes arguments in reverse, first-to-last otherwise, so the
     emitted side effects follow that order.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      /* Drop the chain entirely when the callee is known not to use one.  */
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2568
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.

   Returns a statement tree (possibly a statement list) implementing the
   required jumps; any locally created label is emitted at its end.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: PRED has no shortcut structure left; emit a plain
	 conditional jump to the requested labels.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a label for our own use above, define it at the end so
     the fall-through pieces reach it.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2678
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten statement tree (EXPR itself if no rewriting
   was needed).  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.
	 Each iteration peels one '&&' off PRED and nests the rest.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.
	 Each iteration peels one '||' off PRED and nests the rest.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the rewritten form: condition jumps, then-arm, optional
     jump over the else, optional false label, else-arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Give the jump the location of the last statement it skips
	     from, if any.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2855
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Returns EXPR itself (possibly with its type changed in place) or a
   conversion of EXPR to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case "__builtin_expect (x, y) != 0": if the expected value
     X is itself a truth expression, boolify X in place so the hint
     survives with a boolean operand.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through a cast to the call's own type.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped
	 expression and give the annotation itself boolean type.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2942
2943/* Given a conditional expression *EXPR_P without side effects, gimplify
2944   its operands.  New statements are inserted to PRE_P.  */
2945
2946static enum gimplify_status
2947gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2948{
2949  tree expr = *expr_p, cond;
2950  enum gimplify_status ret, tret;
2951  enum tree_code code;
2952
2953  cond = gimple_boolify (COND_EXPR_COND (expr));
2954
2955  /* We need to handle && and || specially, as their gimplification
2956     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
2957  code = TREE_CODE (cond);
2958  if (code == TRUTH_ANDIF_EXPR)
2959    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2960  else if (code == TRUTH_ORIF_EXPR)
2961    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2962  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2963  COND_EXPR_COND (*expr_p) = cond;
2964
2965  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2966				   is_gimple_val, fb_rvalue);
2967  ret = MIN (ret, tret);
2968  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2969				   is_gimple_val, fb_rvalue);
2970
2971  return MIN (ret, tret);
2972}
2973
2974/* Return true if evaluating EXPR could trap.
2975   EXPR is GENERIC, while tree_could_trap_p can be called
2976   only on GIMPLE.  */
2977
2978static bool
2979generic_expr_could_trap_p (tree expr)
2980{
2981  unsigned i, n;
2982
2983  if (!expr || is_gimple_val (expr))
2984    return false;
2985
2986  if (!EXPR_P (expr) || tree_could_trap_p (expr))
2987    return true;
2988
2989  n = TREE_OPERAND_LENGTH (expr);
2990  for (i = 0; i < n; i++)
2991    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2992      return true;
2993
2994  return false;
2995}
2996
/*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
    into

    if (p)			if (p)
      t1 = a;			  a;
    else		or	else
      t1 = b;			  b;
    t1;

    The second form is used when *EXPR_P is of type void.

    PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

    FALLBACK indicates whether the caller can accept an rvalue and/or
      needs an lvalue for the result.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* We need an lvalue: rewrite the COND_EXPR to choose between
	     the addresses of the arms and dereference the result.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a plain goto to a label in this function,
     reuse its destination as the branch target instead of creating a
     new artificial label.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Emit the GIMPLE_COND with the two branch targets.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                   label_false);

  gimplify_seq_add_stmt (&seq, cond_stmt);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3237
3238/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3239   to be marked addressable.
3240
3241   We cannot rely on such an expression being directly markable if a temporary
3242   has been created by the gimplification.  In this case, we create another
3243   temporary and initialize it with a copy, which will become a store after we
3244   mark it addressable.  This can happen if the front-end passed us something
3245   that it could not mark addressable yet, like a Fortran pass-by-reference
3246   parameter (int) floatvar.  */
3247
3248static void
3249prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3250{
3251  while (handled_component_p (*expr_p))
3252    expr_p = &TREE_OPERAND (*expr_p, 0);
3253  if (is_gimple_reg (*expr_p))
3254    {
3255      tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3256      DECL_GIMPLE_REG_P (var) = 0;
3257      *expr_p = var;
3258    }
3259}
3260
3261/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3262   a call to __builtin_memcpy.  */
3263
3264static enum gimplify_status
3265gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3266    				gimple_seq *seq_p)
3267{
3268  tree t, to, to_ptr, from, from_ptr;
3269  gcall *gs;
3270  location_t loc = EXPR_LOCATION (*expr_p);
3271
3272  to = TREE_OPERAND (*expr_p, 0);
3273  from = TREE_OPERAND (*expr_p, 1);
3274
3275  /* Mark the RHS addressable.  Beware that it may not be possible to do so
3276     directly if a temporary has been created by the gimplification.  */
3277  prepare_gimple_addressable (&from, seq_p);
3278
3279  mark_addressable (from);
3280  from_ptr = build_fold_addr_expr_loc (loc, from);
3281  gimplify_arg (&from_ptr, seq_p, loc);
3282
3283  mark_addressable (to);
3284  to_ptr = build_fold_addr_expr_loc (loc, to);
3285  gimplify_arg (&to_ptr, seq_p, loc);
3286
3287  t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3288
3289  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3290
3291  if (want_value)
3292    {
3293      /* tmp = memcpy() */
3294      t = create_tmp_var (TREE_TYPE (to_ptr));
3295      gimple_call_set_lhs (gs, t);
3296      gimplify_seq_add_stmt (seq_p, gs);
3297
3298      *expr_p = build_simple_mem_ref (t);
3299      return GS_ALL_DONE;
3300    }
3301
3302  gimplify_seq_add_stmt (seq_p, gs);
3303  *expr_p = NULL;
3304  return GS_ALL_DONE;
3305}
3306
3307/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3308   a call to __builtin_memset.  In this case we know that the RHS is
3309   a CONSTRUCTOR with an empty element list.  */
3310
3311static enum gimplify_status
3312gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3313    				gimple_seq *seq_p)
3314{
3315  tree t, from, to, to_ptr;
3316  gcall *gs;
3317  location_t loc = EXPR_LOCATION (*expr_p);
3318
3319  /* Assert our assumptions, to abort instead of producing wrong code
3320     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3321     not be immediately exposed.  */
3322  from = TREE_OPERAND (*expr_p, 1);
3323  if (TREE_CODE (from) == WITH_SIZE_EXPR)
3324    from = TREE_OPERAND (from, 0);
3325
3326  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3327	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3328
3329  /* Now proceed.  */
3330  to = TREE_OPERAND (*expr_p, 0);
3331
3332  to_ptr = build_fold_addr_expr_loc (loc, to);
3333  gimplify_arg (&to_ptr, seq_p, loc);
3334  t = builtin_decl_implicit (BUILT_IN_MEMSET);
3335
3336  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3337
3338  if (want_value)
3339    {
3340      /* tmp = memset() */
3341      t = create_tmp_var (TREE_TYPE (to_ptr));
3342      gimple_call_set_lhs (gs, t);
3343      gimplify_seq_add_stmt (seq_p, gs);
3344
3345      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3346      return GS_ALL_DONE;
3347    }
3348
3349  gimplify_seq_add_stmt (seq_p, gs);
3350  *expr_p = NULL;
3351  return GS_ALL_DONE;
3352}
3353
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.
   An instance of this struct is passed as the walk_tree data.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3367
/* walk_tree callback for gimplify_init_ctor_preeval.  TP points at the
   subtree being examined, XDATA is a gimplify_init_ctor_preeval_data
   describing the lhs.  Return the offending tree (stopping the walk)
   if it may overlap the lhs, NULL otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer-typed parameter of the callee for an
	 alias-set conflict with the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
				        (TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no subtrees that could mention the lhs.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3409
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   PRE_P and POST_P receive any statements generated; on a
   gimplification error *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3481
3482/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3483   a RANGE_EXPR in a CONSTRUCTOR for an array.
3484
3485      var = lower;
3486    loop_entry:
3487      object[var] = value;
3488      if (var == upper)
3489	goto loop_exit;
3490      var = var + 1;
3491      goto loop_entry;
3492    loop_exit:
3493
3494   We increment var _after_ the loop exit check because we might otherwise
3495   fail if upper == TYPE_MAX_VALUE (type for upper).
3496
3497   Note that we never have to deal with SAVE_EXPRs here, because this has
3498   already been taken care of for us, in gimplify_init_ctor_preeval().  */
3499
3500static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3501				     gimple_seq *, bool);
3502
/* Emit the assignment loop described in the comment above.  OBJECT is
   the array being initialized, LOWER and UPPER the inclusive bounds of
   the RANGE_EXPR, VALUE the element value, ARRAY_ELT_TYPE the element
   type, PRE_P the output statement sequence, and CLEARED whether the
   whole LHS object was zeroed first.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3556
3557/* Return true if FDECL is accessing a field that is zero sized.  */
3558
3559static bool
3560zero_sized_field_decl (const_tree fdecl)
3561{
3562  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3563      && integer_zerop (DECL_SIZE (fdecl)))
3564    return true;
3565  return false;
3566}
3567
3568/* Return true if TYPE is zero sized.  */
3569
3570static bool
3571zero_sized_type (const_tree type)
3572{
3573  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3574      && integer_zerop (TYPE_SIZE (type)))
3575    return true;
3576  return false;
3577}
3578
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements go to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the LHS was already zeroed, storing a zero is redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized: an
	 ARRAY_REF for arrays, a COMPONENT_REF for record fields.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors; otherwise emit a plain
	 INIT_EXPR and gimplify it.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3667
3668/* Return the appropriate RHS predicate for this LHS.  */
3669
3670gimple_predicate
3671rhs_predicate_for (tree lhs)
3672{
3673  if (is_gimple_reg (lhs))
3674    return is_gimple_reg_rhs_or_call;
3675  else
3676    return is_gimple_mem_rhs_or_call;
3677}
3678
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  GIMPLE_TEST_F and FALLBACK describe what form the
   caller can accept, which lets us substitute the initializer
   directly in some cases.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) and use the
     decl as the expression's value.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3734
3735/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3736   return a new CONSTRUCTOR if something changed.  */
3737
3738static tree
3739optimize_compound_literals_in_ctor (tree orig_ctor)
3740{
3741  tree ctor = orig_ctor;
3742  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3743  unsigned int idx, num = vec_safe_length (elts);
3744
3745  for (idx = 0; idx < num; idx++)
3746    {
3747      tree value = (*elts)[idx].value;
3748      tree newval = value;
3749      if (TREE_CODE (value) == CONSTRUCTOR)
3750	newval = optimize_compound_literals_in_ctor (value);
3751      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3752	{
3753	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3754	  tree decl = DECL_EXPR_DECL (decl_s);
3755	  tree init = DECL_INITIAL (decl);
3756
3757	  if (!TREE_ADDRESSABLE (value)
3758	      && !TREE_ADDRESSABLE (decl)
3759	      && init
3760	      && TREE_CODE (init) == CONSTRUCTOR)
3761	    newval = optimize_compound_literals_in_ctor (init);
3762	}
3763      if (newval == value)
3764	continue;
3765
3766      if (ctor == orig_ctor)
3767	{
3768	  ctor = copy_node (orig_ctor);
3769	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3770	  elts = CONSTRUCTOR_ELTS (ctor);
3771	}
3772      (*elts)[idx].value = newval;
3773    }
3774  return ctor;
3775}
3776
3777/* A subroutine of gimplify_modify_expr.  Break out elements of a
3778   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3779
3780   Note that we still need to clear any elements that don't have explicit
3781   initializers, so if not all elements are initialized we keep the
3782   original MODIFY_EXPR, we just remove all of the constructor elements.
3783
3784   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3785   GS_ERROR if we would have to create a temporary when gimplifying
3786   this constructor.  Otherwise, return GS_OK.
3787
3788   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
3789
static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* When only probing for temporary creation, leave the LHS alone;
     otherwise gimplify it into a valid lvalue first.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Fold away embedded compound literals in the CONSTRUCTOR before
     inspecting its elements.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  /* Dispatch on the kind of object being initialized.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	/* A missing part reads as zero.  */
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	  compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  /* Either propagate failure, hand back the initialized object as the
     expression's value, or emit the pending assignment ourselves.  */
  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gassign *init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
4140
4141/* Given a pointer value OP0, return a simplified version of an
4142   indirection through OP0, or NULL_TREE if no simplification is
4143   possible.  This may only be applied to a rhs of an expression.
4144   Note that the resulting type may be different from the type pointed
4145   to in the sense that it is still compatible from the langhooks
4146   point of view. */
4147
4148static tree
4149gimple_fold_indirect_ref_rhs (tree t)
4150{
4151  return gimple_fold_indirect_ref (t);
4152}
4153
4154/* Subroutine of gimplify_modify_expr to do simplifications of
4155   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4156   something changes.  */
4157
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;  /* Set when a pass transformed the RHS; iterate again.  */

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatileness of the original reference
		   on the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- the default case only breaks, so falling through
	     when the optimization does not apply is harmless.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4435
4436
4437/* Return true if T looks like a valid GIMPLE statement.  */
4438
4439static bool
4440is_gimple_stmt (tree t)
4441{
4442  const enum tree_code code = TREE_CODE (t);
4443
4444  switch (code)
4445    {
4446    case NOP_EXPR:
4447      /* The only valid NOP_EXPR is the empty statement.  */
4448      return IS_EMPTY_STMT (t);
4449
4450    case BIND_EXPR:
4451    case COND_EXPR:
4452      /* These are only valid if they're void.  */
4453      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4454
4455    case SWITCH_EXPR:
4456    case GOTO_EXPR:
4457    case RETURN_EXPR:
4458    case LABEL_EXPR:
4459    case CASE_LABEL_EXPR:
4460    case TRY_CATCH_EXPR:
4461    case TRY_FINALLY_EXPR:
4462    case EH_FILTER_EXPR:
4463    case CATCH_EXPR:
4464    case ASM_EXPR:
4465    case STATEMENT_LIST:
4466    case OACC_PARALLEL:
4467    case OACC_KERNELS:
4468    case OACC_DATA:
4469    case OACC_HOST_DATA:
4470    case OACC_DECLARE:
4471    case OACC_UPDATE:
4472    case OACC_ENTER_DATA:
4473    case OACC_EXIT_DATA:
4474    case OACC_CACHE:
4475    case OMP_PARALLEL:
4476    case OMP_FOR:
4477    case OMP_SIMD:
4478    case CILK_SIMD:
4479    case OMP_DISTRIBUTE:
4480    case OACC_LOOP:
4481    case OMP_SECTIONS:
4482    case OMP_SECTION:
4483    case OMP_SINGLE:
4484    case OMP_MASTER:
4485    case OMP_TASKGROUP:
4486    case OMP_ORDERED:
4487    case OMP_CRITICAL:
4488    case OMP_TASK:
4489      /* These are always void.  */
4490      return true;
4491
4492    case CALL_EXPR:
4493    case MODIFY_EXPR:
4494    case PREDICT_EXPR:
4495      /* These are valid regardless of their type.  */
4496      return true;
4497
4498    default:
4499      return false;
4500    }
4501}
4502
4503
4504/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4505   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4506   DECL_GIMPLE_REG_P set.
4507
4508   IMPORTANT NOTE: This promotion is performed by introducing a load of the
4509   other, unmodified part of the complex object just before the total store.
4510   As a consequence, if the object is still uninitialized, an undefined value
4511   will be loaded into a register, which may result in a spurious exception
4512   if the register is floating-point and the value happens to be a signaling
4513   NaN for example.  Then the fully-fledged complex operations lowering pass
4514   followed by a DCE pass are necessary in order to fix things up.  */
4515
4516static enum gimplify_status
4517gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4518                                   bool want_value)
4519{
4520  enum tree_code code, ocode;
4521  tree lhs, rhs, new_rhs, other, realpart, imagpart;
4522
4523  lhs = TREE_OPERAND (*expr_p, 0);
4524  rhs = TREE_OPERAND (*expr_p, 1);
4525  code = TREE_CODE (lhs);
4526  lhs = TREE_OPERAND (lhs, 0);
4527
4528  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4529  other = build1 (ocode, TREE_TYPE (rhs), lhs);
4530  TREE_NO_WARNING (other) = 1;
4531  other = get_formal_tmp_var (other, pre_p);
4532
4533  realpart = code == REALPART_EXPR ? rhs : other;
4534  imagpart = code == REALPART_EXPR ? other : rhs;
4535
4536  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4537    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4538  else
4539    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4540
4541  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4542  *expr_p = (want_value) ? rhs : NULL_TREE;
4543
4544  return GS_ALL_DONE;
4545}
4546
4547/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4548
4549      modify_expr
4550	      : varname '=' rhs
4551	      | '*' ID '=' rhs
4552
4553    PRE_P points to the list where side effects that must happen before
4554	*EXPR_P should be stored.
4555
4556    POST_P points to the list where side effects that must happen after
4557	*EXPR_P should be stored.
4558
4559    WANT_VALUE is nonzero iff we want to use the value of this expression
4560	in another expression.  */
4561
4562static enum gimplify_status
4563gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4564		      bool want_value)
4565{
4566  tree *from_p = &TREE_OPERAND (*expr_p, 1);
4567  tree *to_p = &TREE_OPERAND (*expr_p, 0);
4568  enum gimplify_status ret = GS_UNHANDLED;
4569  gimple assign;
4570  location_t loc = EXPR_LOCATION (*expr_p);
4571  gimple_stmt_iterator gsi;
4572
4573  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4574	      || TREE_CODE (*expr_p) == INIT_EXPR);
4575
4576  /* Trying to simplify a clobber using normal logic doesn't work,
4577     so handle it here.  */
4578  if (TREE_CLOBBER_P (*from_p))
4579    {
4580      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4581      if (ret == GS_ERROR)
4582	return ret;
4583      gcc_assert (!want_value
4584		  && (TREE_CODE (*to_p) == VAR_DECL
4585		      || TREE_CODE (*to_p) == MEM_REF));
4586      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4587      *expr_p = NULL;
4588      return GS_ALL_DONE;
4589    }
4590
4591  /* Insert pointer conversions required by the middle-end that are not
4592     required by the frontend.  This fixes middle-end type checking for
4593     for example gcc.dg/redecl-6.c.  */
4594  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4595    {
4596      STRIP_USELESS_TYPE_CONVERSION (*from_p);
4597      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4598	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4599    }
4600
4601  /* See if any simplifications can be done based on what the RHS is.  */
4602  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4603				  want_value);
4604  if (ret != GS_UNHANDLED)
4605    return ret;
4606
4607  /* For zero sized types only gimplify the left hand side and right hand
4608     side as statements and throw away the assignment.  Do this after
4609     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4610     types properly.  */
4611  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4612    {
4613      gimplify_stmt (from_p, pre_p);
4614      gimplify_stmt (to_p, pre_p);
4615      *expr_p = NULL_TREE;
4616      return GS_ALL_DONE;
4617    }
4618
4619  /* If the value being copied is of variable width, compute the length
4620     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4621     before gimplifying any of the operands so that we can resolve any
4622     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4623     the size of the expression to be copied, not of the destination, so
4624     that is what we must do here.  */
4625  maybe_with_size_expr (from_p);
4626
4627  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4628  if (ret == GS_ERROR)
4629    return ret;
4630
4631  /* As a special case, we have to temporarily allow for assignments
4632     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4633     a toplevel statement, when gimplifying the GENERIC expression
4634     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4635     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4636
4637     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4638     prevent gimplify_expr from trying to create a new temporary for
4639     foo's LHS, we tell it that it should only gimplify until it
4640     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4641     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4642     and all we need to do here is set 'a' to be its LHS.  */
4643  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4644		       fb_rvalue);
4645  if (ret == GS_ERROR)
4646    return ret;
4647
4648  /* Now see if the above changed *from_p to something we handle specially.  */
4649  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4650				  want_value);
4651  if (ret != GS_UNHANDLED)
4652    return ret;
4653
4654  /* If we've got a variable sized assignment between two lvalues (i.e. does
4655     not involve a call), then we can make things a bit more straightforward
4656     by converting the assignment to memcpy or memset.  */
4657  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4658    {
4659      tree from = TREE_OPERAND (*from_p, 0);
4660      tree size = TREE_OPERAND (*from_p, 1);
4661
4662      if (TREE_CODE (from) == CONSTRUCTOR)
4663	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4664
4665      if (is_gimple_addressable (from))
4666	{
4667	  *from_p = from;
4668	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4669	      					 pre_p);
4670	}
4671    }
4672
4673  /* Transform partial stores to non-addressable complex variables into
4674     total stores.  This allows us to use real instead of virtual operands
4675     for these variables, which improves optimization.  */
4676  if ((TREE_CODE (*to_p) == REALPART_EXPR
4677       || TREE_CODE (*to_p) == IMAGPART_EXPR)
4678      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4679    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4680
4681  /* Try to alleviate the effects of the gimplification creating artificial
4682     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4683  if (!gimplify_ctxp->into_ssa
4684      && TREE_CODE (*from_p) == VAR_DECL
4685      && DECL_IGNORED_P (*from_p)
4686      && DECL_P (*to_p)
4687      && !DECL_IGNORED_P (*to_p))
4688    {
4689      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4690	DECL_NAME (*from_p)
4691	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4692      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4693      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4694   }
4695
4696  if (want_value && TREE_THIS_VOLATILE (*to_p))
4697    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4698
4699  if (TREE_CODE (*from_p) == CALL_EXPR)
4700    {
4701      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4702	 instead of a GIMPLE_ASSIGN.  */
4703      gcall *call_stmt;
4704      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4705	{
4706	  /* Gimplify internal functions created in the FEs.  */
4707	  int nargs = call_expr_nargs (*from_p), i;
4708	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4709	  auto_vec<tree> vargs (nargs);
4710
4711	  for (i = 0; i < nargs; i++)
4712	    {
4713	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4714			    EXPR_LOCATION (*from_p));
4715	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4716	    }
4717	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4718	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4719	}
4720      else
4721	{
4722	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4723	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4724	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4725	  tree fndecl = get_callee_fndecl (*from_p);
4726	  if (fndecl
4727	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4728	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4729	      && call_expr_nargs (*from_p) == 3)
4730	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4731						    CALL_EXPR_ARG (*from_p, 0),
4732						    CALL_EXPR_ARG (*from_p, 1),
4733						    CALL_EXPR_ARG (*from_p, 2));
4734	  else
4735	    {
4736	      call_stmt = gimple_build_call_from_tree (*from_p);
4737	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4738	    }
4739	}
4740      notice_special_calls (call_stmt);
4741      if (!gimple_call_noreturn_p (call_stmt))
4742	gimple_call_set_lhs (call_stmt, *to_p);
4743      assign = call_stmt;
4744    }
4745  else
4746    {
4747      assign = gimple_build_assign (*to_p, *from_p);
4748      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4749    }
4750
4751  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4752    {
4753      /* We should have got an SSA name from the start.  */
4754      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4755    }
4756
4757  gimplify_seq_add_stmt (pre_p, assign);
4758  gsi = gsi_last (*pre_p);
4759  maybe_fold_stmt (&gsi);
4760
4761  if (want_value)
4762    {
4763      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4764      return GS_OK;
4765    }
4766  else
4767    *expr_p = NULL;
4768
4769  return GS_ALL_DONE;
4770}
4771
4772/* Gimplify a comparison between two variable-sized objects.  Do this
4773   with a call to BUILT_IN_MEMCMP.  */
4774
4775static enum gimplify_status
4776gimplify_variable_sized_compare (tree *expr_p)
4777{
4778  location_t loc = EXPR_LOCATION (*expr_p);
4779  tree op0 = TREE_OPERAND (*expr_p, 0);
4780  tree op1 = TREE_OPERAND (*expr_p, 1);
4781  tree t, arg, dest, src, expr;
4782
4783  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4784  arg = unshare_expr (arg);
4785  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4786  src = build_fold_addr_expr_loc (loc, op1);
4787  dest = build_fold_addr_expr_loc (loc, op0);
4788  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4789  t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4790
4791  expr
4792    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4793  SET_EXPR_LOCATION (expr, loc);
4794  *expr_p = expr;
4795
4796  return GS_OK;
4797}
4798
4799/* Gimplify a comparison between two aggregate objects of integral scalar
4800   mode as a comparison between the bitwise equivalent scalar values.  */
4801
4802static enum gimplify_status
4803gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4804{
4805  location_t loc = EXPR_LOCATION (*expr_p);
4806  tree op0 = TREE_OPERAND (*expr_p, 0);
4807  tree op1 = TREE_OPERAND (*expr_p, 1);
4808
4809  tree type = TREE_TYPE (op0);
4810  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4811
4812  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4813  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4814
4815  *expr_p
4816    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4817
4818  return GS_OK;
4819}
4820
4821/* Gimplify an expression sequence.  This function gimplifies each
4822   expression and rewrites the original expression with the last
4823   expression of the sequence in GIMPLE form.
4824
4825   PRE_P points to the list where the side effects for all the
4826       expressions in the sequence will be emitted.
4827
4828   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
4829
4830static enum gimplify_status
4831gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4832{
4833  tree t = *expr_p;
4834
4835  do
4836    {
4837      tree *sub_p = &TREE_OPERAND (t, 0);
4838
4839      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4840	gimplify_compound_expr (sub_p, pre_p, false);
4841      else
4842	gimplify_stmt (sub_p, pre_p);
4843
4844      t = TREE_OPERAND (t, 1);
4845    }
4846  while (TREE_CODE (t) == COMPOUND_EXPR);
4847
4848  *expr_p = t;
4849  if (want_value)
4850    return GS_OK;
4851  else
4852    {
4853      gimplify_stmt (expr_p, pre_p);
4854      return GS_ALL_DONE;
4855    }
4856}
4857
4858/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4859   gimplify.  After gimplification, EXPR_P will point to a new temporary
4860   that holds the original value of the SAVE_EXPR node.
4861
4862   PRE_P points to the list where side effects that must happen before
4863   *EXPR_P should be stored.  */
4864
4865static enum gimplify_status
4866gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4867{
4868  enum gimplify_status ret = GS_ALL_DONE;
4869  tree val;
4870
4871  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4872  val = TREE_OPERAND (*expr_p, 0);
4873
4874  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4875  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4876    {
4877      /* The operand may be a void-valued expression such as SAVE_EXPRs
4878	 generated by the Java frontend for class initialization.  It is
4879	 being executed only for its side-effects.  */
4880      if (TREE_TYPE (val) == void_type_node)
4881	{
4882	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4883			       is_gimple_stmt, fb_none);
4884	  val = NULL;
4885	}
4886      else
4887	val = get_initialized_tmp_var (val, pre_p, post_p);
4888
4889      TREE_OPERAND (*expr_p, 0) = val;
4890      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4891    }
4892
4893  *expr_p = val;
4894
4895  return ret;
4896}
4897
4898/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4899
4900      unary_expr
4901	      : ...
4902	      | '&' varname
4903	      ...
4904
4905    PRE_P points to the list where side effects that must happen before
4906	*EXPR_P should be stored.
4907
4908    POST_P points to the list where side effects that must happen after
4909	*EXPR_P should be stored.  */
4910
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  /* OP0 is the object whose address is being taken.  */
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr'; add an explicit conversion when
	   the pointer types are not interchangeable for the
	   middle-end.  */
        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR <T> (X) becomes (T *) &X.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      /* Re-read the operand: gimplification above may have replaced it.  */
      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5015
5016/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
5017   value; output operands should be a gimple lvalue.  */
5018
static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are remembered so the input loop below can
     match inputs against them.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* Gimplify the output operands.  In/out operands ("+...") are split
     into a pure output plus a matching numbered input appended to
     ASM_INPUTS.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
        continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* A memory-only operand must live in memory.  */
      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
 	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      /* BUF holds this output's operand number, used as the
		 matching-constraint digit(s) for the new input.  */
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  /* First pass: compute an upper bound on the rewritten
		     constraint's length so STR can be alloca'd.  */
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: rewrite alternative by alternative,
		     substituting the operand number where a register
		     is allowed.  Note this mutates P in place ('=' is
		     written before each alternative for re-parsing).  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register allowed: the input reuses the original
	       constraint minus its leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the synthesized input (with an unshared copy of the
	     operand) onto ASM_INPUTS so the input loop below sees it.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Gimplify the input operands.  I keeps counting from the output
     loop so diagnostics number operands across the whole asm.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Increment/decrement expressions are not valid memory
	     operands; poison them so gimplification fails cleanly.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((TREE_CODE (x) == VAR_DECL
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just detach each link
     from its chain and collect it for the GIMPLE_ASM tuple.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5288
5289/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5290   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5291   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5292   return to this function.
5293
5294   FIXME should we complexify the prequeue handling instead?  Or use flags
5295   for all the cleanups and let the optimizer tighten them up?  The current
5296   code seems pretty fragile; it will break on a cleanup within any
5297   non-conditional nesting.  But any such nesting would be broken, anyway;
5298   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5299   and continues out of it.  We can do that at the RTL level, though, so
5300   having an optimizer to tighten up try/finally regions would be a Good
5301   Thing.  */
5302
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression produces a value, TEMP is the temporary
     voidify_wrapper_expr introduced to carry it; we return it as the
     replacement expression at the end.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  /* Restore the caller's conditional-cleanup state.  */
  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Walk the gimplified body, converting each GIMPLE_WITH_CLEANUP_EXPR
     marker into a GIMPLE_TRY that spans the statements following it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement: nothing follows that a
		 try/finally could protect, so splice the cleanup in
		 directly (unless it is EH-only, in which case it is
		 dropped).  */
              /* Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups run only on exception paths.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* SEQ is everything after the WCE; it becomes the
		 protected body of the new GIMPLE_TRY, which replaces
		 the WCE in place.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5379
5380/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5381   is the cleanup action required.  EH_ONLY is true if the cleanup should
5382   only be executed if an exception is thrown, not on normal exit.  */
5383
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
      gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* The flag-clearing assignment and the guarded cleanup marker go
	 on the conditional-cleanups sequence; the flag-setting
	 assignment is emitted here, at the point of initialization, so
	 the flag is true only if initialization actually ran.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional context: just emit the cleanup marker here.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5442
5443/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
5444
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  /* TEMP is the slot the TARGET_EXPR initializes; it replaces the
     whole expression on success.  */
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise lower an explicit 'temp = init' assignment.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  /* The INIT_EXPR has been lowered onto PRE_P; free the scratch
	     tree and note there is nothing left to emit.  */
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Anything of INIT left over after gimplification still needs to
	 be emitted before the slot is used.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing so a stack-reuse clobber can be appended.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is GIMPLE's
	     representation of a clobber.  */
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5530
5531/* Gimplification of expression trees.  */
5532
5533/* Gimplify an expression which appears at statement context.  The
5534   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5535   NULL, a new sequence is allocated.
5536
5537   Return true if we actually added a statement to the queue.  */
5538
5539bool
5540gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5541{
5542  gimple_seq_node last;
5543
5544  last = gimple_seq_last (*seq_p);
5545  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5546  return last != gimple_seq_last (*seq_p);
5547}
5548
5549/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5550   to CTX.  If entries already exist, force them to be some flavor of private.
5551   If there is no enclosing parallel, do nothing.  */
5552
5553void
5554omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5555{
5556  splay_tree_node n;
5557
5558  if (decl == NULL || !DECL_P (decl))
5559    return;
5560
5561  do
5562    {
5563      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5564      if (n != NULL)
5565	{
5566	  if (n->value & GOVD_SHARED)
5567	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5568	  else if (n->value & GOVD_MAP)
5569	    n->value |= GOVD_MAP_TO_ONLY;
5570	  else
5571	    return;
5572	}
5573      else if (ctx->region_type == ORT_TARGET)
5574	omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5575      else if (ctx->region_type != ORT_WORKSHARE
5576	       && ctx->region_type != ORT_SIMD
5577	       && ctx->region_type != ORT_TARGET_DATA)
5578	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5579
5580      ctx = ctx->outer_context;
5581    }
5582  while (ctx);
5583}
5584
5585/* Similarly for each of the type sizes of TYPE.  */
5586
static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Process each main variant only once per context; privatized_types
     remembers the ones already handled.  */
  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: firstprivatize the (possibly variable) bounds.  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      /* Recurse on the element type and the index domain.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Aggregates: handle each field's offset and type.  */
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Recurse on the pointed-to type.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* Finally handle the overall size expressions of TYPE itself, and
     give the front end a chance via its langhook.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
5640
5641/* Add an entry for DECL in the OMP context CTX with FLAGS.  */
5642
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  /* An existing entry that is exactly GOVD_ALIGNED is allowed to be
     augmented below; anything else merges flags here and returns.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The DECL_VALUE_EXPR is *PTR; recover PTR and add it.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Merge into an existing GOVD_ALIGNED entry, or create a new one.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5744
5745/* Notice a threadprivate variable DECL used in OMP context CTX.
5746   This just prints out diagnostics about threadprivate variable uses
5747   in untied tasks.  If DECL2 is non-NULL, prevent this warning
5748   on that variable.  */
5749
static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Diagnose uses inside any enclosing target region.  Inserting DECL
     into the region's variable table suppresses repeated errors for the
     same variable.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if (octx->region_type == ORT_TARGET)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	/* Record DECL2 too so no warning is issued on it later.  */
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* Only untied tasks get the second diagnostic below.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  /* Threadprivate variables are never remapped.  */
  return false;
}
5786
5787/* Record the fact that DECL was used within the OMP context CTX.
5788   IN_CODE is true when real code uses DECL, and false when we should
5789   merely emit default(none) errors.  Return true if DECL is going to
5790   be remapped and thus DECL shouldn't be gimplified into its
5791   DECL_VALUE_EXPR (if any).  */
5792
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  /* A global whose value expr is based on a thread-local decl is
	     handled as threadprivate as well.  */
	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region, unseen variables are implicitly mapped.  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  n->value |= flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* First time DECL is seen in CTX: compute its implicit
	 data-sharing attribute from the default clause and the
	 language's predetermined sharing rules.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): the variable had to be listed explicitly.  */
	  if ((ctx->region_type & ORT_PARALLEL) != 0)
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  else if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type & ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    gcc_unreachable ();
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward for a context where DECL is already non-shared;
	     that makes it firstprivate here.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* Function-local decls with no enclosing parallel/teams are
	     firstprivate; everything else defaults to shared.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* For a variable-sized decl seen for the first time in real code,
     also mark its pointer replacement variable (the base of its
     DECL_VALUE_EXPR) as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5981
5982/* Verify that DECL is private within CTX.  If there's specific information
5983   to the contrary in the innermost scope, generate an error.  */
5984
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      /* SIMD loops word the diagnostic differently because their
		 iteration variable is predetermined linear, not private.  */
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force the variable private to avoid cascading errors.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* The iteration variable was listed in an explicit clause;
	     diagnose the clause kinds that are invalid for it.  SIMD
	     encoding: 0 = not simd, 1 = combined for simd, 2 = simd.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Not found here: only keep looking outward through workshare/simd
     contexts.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
6042
6043/* Return true if DECL is private within a parallel region
6044   that binds to the current construct's context or in parallel
6045   region's REDUCTION clause.  */
6046
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	/* Ran out of contexts: function-local non-reference variables
	   are private by default.  */
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too,
		    when checking for copyprivate, assume they might be
		    private, otherwise assume they might be shared.  */
		 || (!copyprivate
		     && lang_hooks.decls.omp_privatize_by_reference (decl)));

      /* NOTE(review): this `continue` re-evaluates the do-while
	 condition below, which is false for TARGET/TARGET_DATA regions,
	 so the walk stops here and returns false rather than skipping
	 to the next outer context — confirm this is intended.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
	continue;

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	/* Private iff not shared in the context where it was found.  */
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD);
  return false;
}
6074
6075/* Return true if the CTX is combined with distribute and thus
6076   lastprivate can't be supported.  */
6077
6078static bool
6079omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6080{
6081  do
6082    {
6083      if (ctx->outer_context == NULL)
6084	return false;
6085      ctx = ctx->outer_context;
6086      switch (ctx->region_type)
6087	{
6088	case ORT_WORKSHARE:
6089	  if (!ctx->combined_loop)
6090	    return false;
6091	  if (ctx->distribute)
6092	    return true;
6093	  break;
6094	case ORT_COMBINED_PARALLEL:
6095	  break;
6096	case ORT_COMBINED_TEAMS:
6097	  return true;
6098	default:
6099	  return false;
6100	}
6101    }
6102  while (1);
6103}
6104
6105/* Scan the OMP clauses in *LIST_P, installing mappings into a new
6106   and previous omp contexts.  */
6107
static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;

  /* Open a fresh context for this region; it becomes gimplify_omp_ctxp
     at the end of this function.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  /* Walk the clause chain; clauses that fail to gimplify are unlinked
     from the list (see the tail of the loop).  */
  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (omp_no_lastprivate (ctx))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  else if (error_operand_p (decl))
	    goto do_add;
	  /* For combined constructs, also register DECL appropriately
	     on the enclosing parallel/workshare contexts.  */
	  else if (outer_ctx
		   && outer_ctx->region_type == ORT_COMBINED_PARALLEL
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL)
	    omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
	  else if (outer_ctx
		   && outer_ctx->region_type == ORT_WORKSHARE
		   && outer_ctx->combined_loop
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL
		   && !omp_check_private (outer_ctx, decl, false))
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	      if (outer_ctx->outer_context
		  && (outer_ctx->outer_context->region_type
		      == ORT_COMBINED_PARALLEL)
		  && splay_tree_lookup (outer_ctx->outer_context->variables,
					(splay_tree_key) decl) == NULL)
		omp_add_variable (outer_ctx->outer_context, decl,
				  GOVD_SHARED | GOVD_SEEN);
	    }
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      decl = NULL_TREE;
	      if (omp_no_lastprivate (ctx))
		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL
			      || (octx->outer_context->region_type
				  == ORT_COMBINED_TEAMS)))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else
		    break;
		  if (splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl) != NULL)
		    {
		      octx = NULL;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  /* Default the mapped size from the decl or its type.  */
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  /* Non-decl map operands just need a gimple lvalue.  */
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_DEPEND:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      /* Gimplify the side effects and keep the second operand
		 as the dependence address.  */
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  /* Dependences are tracked by address.  */
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	  /* Shared path for data-sharing clauses: install DECL in the
	     new context with the FLAGS computed above, then gimplify
	     any clause-attached statements in that context.  */
	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions in the
		 new context and stash the resulting sequences on the
		 clause.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      /* Wrap the statement in a BIND_EXPR so gimplification has
		 a scope to work with.  */
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  /* copyprivate requires the variable to be threadprivate or
	     private in the outer context.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	  /* Propagate the use of DECL to the enclosing context and
	     diagnose clauses that reference outer-private variables.  */
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	  /* These take a condition; boolify it first.  */
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  /* Clauses with expression operands: gimplify the operand(s)
	     into PRE_P; GANG has a second operand.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_GANG
	      && gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
				is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_USE_DEVICE:
	case OMP_CLAUSE_INDEPENDENT:
	  /* Not handled here; drop them.  */
	  remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	  /* Nothing to gimplify for these.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Either unlink the clause or advance past it.  */
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Make the new context current for gimplifying the region body.  */
  gimplify_omp_ctxp = ctx;
}
6543
/* Bundle of state threaded through splay_tree_foreach into
   gimplify_adjust_omp_clauses_1, which takes only a single void*
   callback argument.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain to prepend implicit
			   data-sharing clauses to.  */
  gimple_seq *pre_p;	/* Sequence receiving statements emitted while
			   finalizing new clauses.  */
};
6549
/* Splay-tree callback invoked by gimplify_adjust_omp_clauses for each
   variable recorded in the OMP context.  N->key is the variable and
   N->value its GOVD_* flag set.  For every variable that was referenced
   in the region (GOVD_SEEN) but has no explicit clause, build the
   matching implicit clause (PRIVATE, SHARED, FIRSTPRIVATE, LASTPRIVATE
   or MAP) and prepend it to DATA->list_p; clause finalization code is
   gimplified into DATA->pre_p.  Always returns 0 so the walk over the
   splay tree continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Variables with an explicit clause, or local to the context, need no
     implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never actually referenced in the region are skipped.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      /* GOVD_DEBUG_PRIVATE is only ever paired with GOVD_PRIVATE.  */
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the language front end decide whether this should be emitted
       as a debug-only private clause.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* flags into the clause code to emit.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global that is not privatized in any enclosing context is
	     shared by default already; emitting a SHARED clause for it
	     would be redundant, so only emit one if some outer context
	     gives it a non-shared data-sharing class.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* ALIGNED is handled entirely by explicit clauses.  */
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_SET_MAP_KIND (clause,
			       flags & GOVD_MAP_TO_ONLY
			       ? GOMP_MAP_TO
			       : GOMP_MAP_TOFROM);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: it is accessed through a pointer held
	     in its DECL_VALUE_EXPR.  Map the pointed-to storage, and
	     chain an extra GOMP_MAP_POINTER clause for the pointer
	     itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Firstprivate that is also lastprivate: chain a LASTPRIVATE
	 clause marked as coming from a FIRSTPRIVATE, and let the front
	 end finish it with the outer context temporarily current.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Finish the new clause with the outer context current, as any code
     it emits belongs outside the construct being gimplified.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
6674
6675static void
6676gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
6677{
6678  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6679  tree c, decl;
6680
6681  while ((c = *list_p) != NULL)
6682    {
6683      splay_tree_node n;
6684      bool remove = false;
6685
6686      switch (OMP_CLAUSE_CODE (c))
6687	{
6688	case OMP_CLAUSE_PRIVATE:
6689	case OMP_CLAUSE_SHARED:
6690	case OMP_CLAUSE_FIRSTPRIVATE:
6691	case OMP_CLAUSE_LINEAR:
6692	  decl = OMP_CLAUSE_DECL (c);
6693	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6694	  remove = !(n->value & GOVD_SEEN);
6695	  if (! remove)
6696	    {
6697	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6698	      if ((n->value & GOVD_DEBUG_PRIVATE)
6699		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6700		{
6701		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6702			      || ((n->value & GOVD_DATA_SHARE_CLASS)
6703				  == GOVD_PRIVATE));
6704		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6705		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6706		}
6707	    }
6708	  break;
6709
6710	case OMP_CLAUSE_LASTPRIVATE:
6711	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6712	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
6713	  decl = OMP_CLAUSE_DECL (c);
6714	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6715	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6716	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
6717	  if (omp_no_lastprivate (ctx))
6718	    {
6719	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6720		remove = true;
6721	      else
6722		OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
6723	    }
6724	  break;
6725
6726	case OMP_CLAUSE_ALIGNED:
6727	  decl = OMP_CLAUSE_DECL (c);
6728	  if (!is_global_var (decl))
6729	    {
6730	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6731	      remove = n == NULL || !(n->value & GOVD_SEEN);
6732	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6733		{
6734		  struct gimplify_omp_ctx *octx;
6735		  if (n != NULL
6736		      && (n->value & (GOVD_DATA_SHARE_CLASS
6737				      & ~GOVD_FIRSTPRIVATE)))
6738		    remove = true;
6739		  else
6740		    for (octx = ctx->outer_context; octx;
6741			 octx = octx->outer_context)
6742		      {
6743			n = splay_tree_lookup (octx->variables,
6744					       (splay_tree_key) decl);
6745			if (n == NULL)
6746			  continue;
6747			if (n->value & GOVD_LOCAL)
6748			  break;
6749			/* We have to avoid assigning a shared variable
6750			   to itself when trying to add
6751			   __builtin_assume_aligned.  */
6752			if (n->value & GOVD_SHARED)
6753			  {
6754			    remove = true;
6755			    break;
6756			  }
6757		      }
6758		}
6759	    }
6760	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6761	    {
6762	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6763	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6764		remove = true;
6765	    }
6766	  break;
6767
6768	case OMP_CLAUSE_MAP:
6769	  decl = OMP_CLAUSE_DECL (c);
6770	  if (!DECL_P (decl))
6771	    break;
6772	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6773	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6774	    remove = true;
6775	  else if (DECL_SIZE (decl)
6776		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6777		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER)
6778	    {
6779	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
6780		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
6781		 INTEGER_CST.  */
6782	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
6783
6784	      tree decl2 = DECL_VALUE_EXPR (decl);
6785	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6786	      decl2 = TREE_OPERAND (decl2, 0);
6787	      gcc_assert (DECL_P (decl2));
6788	      tree mem = build_simple_mem_ref (decl2);
6789	      OMP_CLAUSE_DECL (c) = mem;
6790	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6791	      if (ctx->outer_context)
6792		{
6793		  omp_notice_variable (ctx->outer_context, decl2, true);
6794		  omp_notice_variable (ctx->outer_context,
6795				       OMP_CLAUSE_SIZE (c), true);
6796		}
6797	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6798					  OMP_CLAUSE_MAP);
6799	      OMP_CLAUSE_DECL (nc) = decl;
6800	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
6801	      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
6802	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6803	      OMP_CLAUSE_CHAIN (c) = nc;
6804	      c = nc;
6805	    }
6806	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6807	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6808	  break;
6809
6810	case OMP_CLAUSE_TO:
6811	case OMP_CLAUSE_FROM:
6812	case OMP_CLAUSE__CACHE_:
6813	  decl = OMP_CLAUSE_DECL (c);
6814	  if (!DECL_P (decl))
6815	    break;
6816	  if (DECL_SIZE (decl)
6817	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6818	    {
6819	      tree decl2 = DECL_VALUE_EXPR (decl);
6820	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6821	      decl2 = TREE_OPERAND (decl2, 0);
6822	      gcc_assert (DECL_P (decl2));
6823	      tree mem = build_simple_mem_ref (decl2);
6824	      OMP_CLAUSE_DECL (c) = mem;
6825	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6826	      if (ctx->outer_context)
6827		{
6828		  omp_notice_variable (ctx->outer_context, decl2, true);
6829		  omp_notice_variable (ctx->outer_context,
6830				       OMP_CLAUSE_SIZE (c), true);
6831		}
6832	    }
6833	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6834	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6835	  break;
6836
6837	case OMP_CLAUSE_REDUCTION:
6838	case OMP_CLAUSE_COPYIN:
6839	case OMP_CLAUSE_COPYPRIVATE:
6840	case OMP_CLAUSE_IF:
6841	case OMP_CLAUSE_NUM_THREADS:
6842	case OMP_CLAUSE_NUM_TEAMS:
6843	case OMP_CLAUSE_THREAD_LIMIT:
6844	case OMP_CLAUSE_DIST_SCHEDULE:
6845	case OMP_CLAUSE_DEVICE:
6846	case OMP_CLAUSE_SCHEDULE:
6847	case OMP_CLAUSE_NOWAIT:
6848	case OMP_CLAUSE_ORDERED:
6849	case OMP_CLAUSE_DEFAULT:
6850	case OMP_CLAUSE_UNTIED:
6851	case OMP_CLAUSE_COLLAPSE:
6852	case OMP_CLAUSE_FINAL:
6853	case OMP_CLAUSE_MERGEABLE:
6854	case OMP_CLAUSE_PROC_BIND:
6855	case OMP_CLAUSE_SAFELEN:
6856	case OMP_CLAUSE_DEPEND:
6857	case OMP_CLAUSE__CILK_FOR_COUNT_:
6858	case OMP_CLAUSE_ASYNC:
6859	case OMP_CLAUSE_WAIT:
6860	case OMP_CLAUSE_DEVICE_RESIDENT:
6861	case OMP_CLAUSE_USE_DEVICE:
6862	case OMP_CLAUSE_INDEPENDENT:
6863	case OMP_CLAUSE_NUM_GANGS:
6864	case OMP_CLAUSE_NUM_WORKERS:
6865	case OMP_CLAUSE_VECTOR_LENGTH:
6866	case OMP_CLAUSE_GANG:
6867	case OMP_CLAUSE_WORKER:
6868	case OMP_CLAUSE_VECTOR:
6869	case OMP_CLAUSE_AUTO:
6870	case OMP_CLAUSE_SEQ:
6871	  break;
6872
6873	default:
6874	  gcc_unreachable ();
6875	}
6876
6877      if (remove)
6878	*list_p = OMP_CLAUSE_CHAIN (c);
6879      else
6880	list_p = &OMP_CLAUSE_CHAIN (c);
6881    }
6882
6883  /* Add in any implicit data sharing.  */
6884  struct gimplify_adjust_omp_clauses_data data;
6885  data.list_p = list_p;
6886  data.pre_p = pre_p;
6887  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
6888
6889  gimplify_omp_ctxp = ctx->outer_context;
6890  delete_omp_context (ctx);
6891}
6892
6893/* Gimplify OACC_CACHE.  */
6894
6895static void
6896gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
6897{
6898  tree expr = *expr_p;
6899
6900  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6901  gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr));
6902
6903  /* TODO: Do something sensible with this information.  */
6904
6905  *expr_p = NULL_TREE;
6906}
6907
6908/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6909   gimplification of the body, as well as scanning the body for used
6910   variables.  We need to do this scan now, because variable-sized
6911   decls will be decomposed during gimplification.  */
6912
6913static void
6914gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6915{
6916  tree expr = *expr_p;
6917  gimple g;
6918  gimple_seq body = NULL;
6919
6920  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6921			     OMP_PARALLEL_COMBINED (expr)
6922			     ? ORT_COMBINED_PARALLEL
6923			     : ORT_PARALLEL);
6924
6925  push_gimplify_context ();
6926
6927  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6928  if (gimple_code (g) == GIMPLE_BIND)
6929    pop_gimplify_context (g);
6930  else
6931    pop_gimplify_context (NULL);
6932
6933  gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6934
6935  g = gimple_build_omp_parallel (body,
6936				 OMP_PARALLEL_CLAUSES (expr),
6937				 NULL_TREE, NULL_TREE);
6938  if (OMP_PARALLEL_COMBINED (expr))
6939    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6940  gimplify_seq_add_stmt (pre_p, g);
6941  *expr_p = NULL_TREE;
6942}
6943
6944/* Gimplify the contents of an OMP_TASK statement.  This involves
6945   gimplification of the body, as well as scanning the body for used
6946   variables.  We need to do this scan now, because variable-sized
6947   decls will be decomposed during gimplification.  */
6948
6949static void
6950gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6951{
6952  tree expr = *expr_p;
6953  gimple g;
6954  gimple_seq body = NULL;
6955
6956  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6957			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6958					      OMP_CLAUSE_UNTIED)
6959			     ? ORT_UNTIED_TASK : ORT_TASK);
6960
6961  push_gimplify_context ();
6962
6963  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6964  if (gimple_code (g) == GIMPLE_BIND)
6965    pop_gimplify_context (g);
6966  else
6967    pop_gimplify_context (NULL);
6968
6969  gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6970
6971  g = gimple_build_omp_task (body,
6972			     OMP_TASK_CLAUSES (expr),
6973			     NULL_TREE, NULL_TREE,
6974			     NULL_TREE, NULL_TREE, NULL_TREE);
6975  gimplify_seq_add_stmt (pre_p, g);
6976  *expr_p = NULL_TREE;
6977}
6978
6979/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6980   with non-NULL OMP_FOR_INIT.  */
6981
6982static tree
6983find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6984{
6985  *walk_subtrees = 0;
6986  switch (TREE_CODE (*tp))
6987    {
6988    case OMP_FOR:
6989      *walk_subtrees = 1;
6990      /* FALLTHRU */
6991    case OMP_SIMD:
6992      if (OMP_FOR_INIT (*tp) != NULL_TREE)
6993	return *tp;
6994      break;
6995    case BIND_EXPR:
6996    case STATEMENT_LIST:
6997    case OMP_PARALLEL:
6998      *walk_subtrees = 1;
6999      break;
7000    default:
7001      break;
7002    }
7003  return NULL_TREE;
7004}
7005
7006/* Gimplify the gross structure of an OMP_FOR statement.  */
7007
7008static enum gimplify_status
7009gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
7010{
7011  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
7012  enum gimplify_status ret = GS_ALL_DONE;
7013  enum gimplify_status tret;
7014  gomp_for *gfor;
7015  gimple_seq for_body, for_pre_body;
7016  int i;
7017  bool simd;
7018  bitmap has_decl_expr = NULL;
7019
7020  orig_for_stmt = for_stmt = *expr_p;
7021
7022  switch (TREE_CODE (for_stmt))
7023    {
7024    case OMP_FOR:
7025    case CILK_FOR:
7026    case OMP_DISTRIBUTE:
7027    case OACC_LOOP:
7028      simd = false;
7029      break;
7030    case OMP_SIMD:
7031    case CILK_SIMD:
7032      simd = true;
7033      break;
7034    default:
7035      gcc_unreachable ();
7036    }
7037
7038  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
7039     clause for the IV.  */
7040  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7041    {
7042      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
7043      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7044      decl = TREE_OPERAND (t, 0);
7045      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7046	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7047	    && OMP_CLAUSE_DECL (c) == decl)
7048	  {
7049	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7050	    break;
7051	  }
7052    }
7053
7054  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7055    {
7056      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
7057      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
7058				  find_combined_omp_for, NULL, NULL);
7059      if (inner_for_stmt == NULL_TREE)
7060	{
7061	  gcc_assert (seen_error ());
7062	  *expr_p = NULL_TREE;
7063	  return GS_ERROR;
7064	}
7065    }
7066
7067  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
7068			     simd ? ORT_SIMD : ORT_WORKSHARE);
7069  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
7070    gimplify_omp_ctxp->distribute = true;
7071
7072  /* Handle OMP_FOR_INIT.  */
7073  for_pre_body = NULL;
7074  if (simd && OMP_FOR_PRE_BODY (for_stmt))
7075    {
7076      has_decl_expr = BITMAP_ALLOC (NULL);
7077      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
7078	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
7079	     == VAR_DECL)
7080	{
7081	  t = OMP_FOR_PRE_BODY (for_stmt);
7082	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7083	}
7084      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
7085	{
7086	  tree_stmt_iterator si;
7087	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
7088	       tsi_next (&si))
7089	    {
7090	      t = tsi_stmt (si);
7091	      if (TREE_CODE (t) == DECL_EXPR
7092		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
7093		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7094	    }
7095	}
7096    }
7097  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
7098  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
7099
7100  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7101    {
7102      for_stmt = inner_for_stmt;
7103      gimplify_omp_ctxp->combined_loop = true;
7104    }
7105
7106  for_body = NULL;
7107  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7108	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
7109  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7110	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
7111  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7112    {
7113      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7114      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7115      decl = TREE_OPERAND (t, 0);
7116      gcc_assert (DECL_P (decl));
7117      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
7118		  || POINTER_TYPE_P (TREE_TYPE (decl)));
7119
7120      /* Make sure the iteration variable is private.  */
7121      tree c = NULL_TREE;
7122      tree c2 = NULL_TREE;
7123      if (orig_for_stmt != for_stmt)
7124	/* Do this only on innermost construct for combined ones.  */;
7125      else if (simd)
7126	{
7127	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
7128						 (splay_tree_key)decl);
7129	  omp_is_private (gimplify_omp_ctxp, decl,
7130			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7131			       != 1));
7132	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7133	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
7134	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7135	    {
7136	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7137	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7138	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
7139	      if ((has_decl_expr
7140		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7141		  || omp_no_lastprivate (gimplify_omp_ctxp))
7142		{
7143		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7144		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7145		}
7146	      struct gimplify_omp_ctx *outer
7147		= gimplify_omp_ctxp->outer_context;
7148	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7149		{
7150		  if (outer->region_type == ORT_WORKSHARE
7151		      && outer->combined_loop)
7152		    {
7153		      n = splay_tree_lookup (outer->variables,
7154					     (splay_tree_key)decl);
7155		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
7156			{
7157			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7158			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7159			}
7160		    }
7161		}
7162
7163	      OMP_CLAUSE_DECL (c) = decl;
7164	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7165	      OMP_FOR_CLAUSES (for_stmt) = c;
7166	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
7167	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7168		{
7169		  if (outer->region_type == ORT_WORKSHARE
7170		      && outer->combined_loop)
7171		    {
7172		      if (outer->outer_context
7173			  && (outer->outer_context->region_type
7174			      == ORT_COMBINED_PARALLEL))
7175			outer = outer->outer_context;
7176		      else if (omp_check_private (outer, decl, false))
7177			outer = NULL;
7178		    }
7179		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
7180		    outer = NULL;
7181		  if (outer)
7182		    {
7183		      n = splay_tree_lookup (outer->variables,
7184					     (splay_tree_key)decl);
7185		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
7186			{
7187			  omp_add_variable (outer, decl,
7188					    GOVD_LASTPRIVATE | GOVD_SEEN);
7189			  if (outer->outer_context)
7190			    omp_notice_variable (outer->outer_context, decl,
7191						 true);
7192			}
7193		    }
7194		}
7195	    }
7196	  else
7197	    {
7198	      bool lastprivate
7199		= (!has_decl_expr
7200		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7201		  && !omp_no_lastprivate (gimplify_omp_ctxp);
7202	      struct gimplify_omp_ctx *outer
7203		= gimplify_omp_ctxp->outer_context;
7204	      if (outer && lastprivate)
7205		{
7206		  if (outer->region_type == ORT_WORKSHARE
7207		      && outer->combined_loop)
7208		    {
7209		      n = splay_tree_lookup (outer->variables,
7210					     (splay_tree_key)decl);
7211		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
7212			{
7213			  lastprivate = false;
7214			  outer = NULL;
7215			}
7216		      else if (outer->outer_context
7217			       && (outer->outer_context->region_type
7218				   == ORT_COMBINED_PARALLEL))
7219			outer = outer->outer_context;
7220		      else if (omp_check_private (outer, decl, false))
7221			outer = NULL;
7222		    }
7223		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
7224		    outer = NULL;
7225		  if (outer)
7226		    {
7227		      n = splay_tree_lookup (outer->variables,
7228					     (splay_tree_key)decl);
7229		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
7230			{
7231			  omp_add_variable (outer, decl,
7232					    GOVD_LASTPRIVATE | GOVD_SEEN);
7233			  if (outer->outer_context)
7234			    omp_notice_variable (outer->outer_context, decl,
7235						 true);
7236			}
7237		    }
7238		}
7239
7240	      c = build_omp_clause (input_location,
7241				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
7242						: OMP_CLAUSE_PRIVATE);
7243	      OMP_CLAUSE_DECL (c) = decl;
7244	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7245	      OMP_FOR_CLAUSES (for_stmt) = c;
7246	      omp_add_variable (gimplify_omp_ctxp, decl,
7247				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
7248				| GOVD_EXPLICIT | GOVD_SEEN);
7249	      c = NULL_TREE;
7250	    }
7251	}
7252      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
7253	omp_notice_variable (gimplify_omp_ctxp, decl, true);
7254      else
7255	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
7256
7257      /* If DECL is not a gimple register, create a temporary variable to act
7258	 as an iteration counter.  This is valid, since DECL cannot be
7259	 modified in the body of the loop.  Similarly for any iteration vars
7260	 in simd with collapse > 1 where the iterator vars must be
7261	 lastprivate.  */
7262      if (orig_for_stmt != for_stmt)
7263	var = decl;
7264      else if (!is_gimple_reg (decl)
7265	       || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
7266	{
7267	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7268	  TREE_OPERAND (t, 0) = var;
7269
7270	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
7271
7272	  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7273	    {
7274	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7275	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
7276	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
7277	      OMP_CLAUSE_DECL (c2) = var;
7278	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
7279	      OMP_FOR_CLAUSES (for_stmt) = c2;
7280	      omp_add_variable (gimplify_omp_ctxp, var,
7281				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
7282	      if (c == NULL_TREE)
7283		{
7284		  c = c2;
7285		  c2 = NULL_TREE;
7286		}
7287	    }
7288	  else
7289	    omp_add_variable (gimplify_omp_ctxp, var,
7290			      GOVD_PRIVATE | GOVD_SEEN);
7291	}
7292      else
7293	var = decl;
7294
7295      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7296			    is_gimple_val, fb_rvalue);
7297      ret = MIN (ret, tret);
7298      if (ret == GS_ERROR)
7299	return ret;
7300
7301      /* Handle OMP_FOR_COND.  */
7302      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7303      gcc_assert (COMPARISON_CLASS_P (t));
7304      gcc_assert (TREE_OPERAND (t, 0) == decl);
7305
7306      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7307			    is_gimple_val, fb_rvalue);
7308      ret = MIN (ret, tret);
7309
7310      /* Handle OMP_FOR_INCR.  */
7311      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7312      switch (TREE_CODE (t))
7313	{
7314	case PREINCREMENT_EXPR:
7315	case POSTINCREMENT_EXPR:
7316	  {
7317	    tree decl = TREE_OPERAND (t, 0);
7318	    /* c_omp_for_incr_canonicalize_ptr() should have been
7319	       called to massage things appropriately.  */
7320	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7321
7322	    if (orig_for_stmt != for_stmt)
7323	      break;
7324	    t = build_int_cst (TREE_TYPE (decl), 1);
7325	    if (c)
7326	      OMP_CLAUSE_LINEAR_STEP (c) = t;
7327	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7328	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7329	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7330	    break;
7331	  }
7332
7333	case PREDECREMENT_EXPR:
7334	case POSTDECREMENT_EXPR:
7335	  /* c_omp_for_incr_canonicalize_ptr() should have been
7336	     called to massage things appropriately.  */
7337	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7338	  if (orig_for_stmt != for_stmt)
7339	    break;
7340	  t = build_int_cst (TREE_TYPE (decl), -1);
7341	  if (c)
7342	    OMP_CLAUSE_LINEAR_STEP (c) = t;
7343	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7344	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7345	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7346	  break;
7347
7348	case MODIFY_EXPR:
7349	  gcc_assert (TREE_OPERAND (t, 0) == decl);
7350	  TREE_OPERAND (t, 0) = var;
7351
7352	  t = TREE_OPERAND (t, 1);
7353	  switch (TREE_CODE (t))
7354	    {
7355	    case PLUS_EXPR:
7356	      if (TREE_OPERAND (t, 1) == decl)
7357		{
7358		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
7359		  TREE_OPERAND (t, 0) = var;
7360		  break;
7361		}
7362
7363	      /* Fallthru.  */
7364	    case MINUS_EXPR:
7365	    case POINTER_PLUS_EXPR:
7366	      gcc_assert (TREE_OPERAND (t, 0) == decl);
7367	      TREE_OPERAND (t, 0) = var;
7368	      break;
7369	    default:
7370	      gcc_unreachable ();
7371	    }
7372
7373	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7374				is_gimple_val, fb_rvalue);
7375	  ret = MIN (ret, tret);
7376	  if (c)
7377	    {
7378	      tree step = TREE_OPERAND (t, 1);
7379	      tree stept = TREE_TYPE (decl);
7380	      if (POINTER_TYPE_P (stept))
7381		stept = sizetype;
7382	      step = fold_convert (stept, step);
7383	      if (TREE_CODE (t) == MINUS_EXPR)
7384		step = fold_build1 (NEGATE_EXPR, stept, step);
7385	      OMP_CLAUSE_LINEAR_STEP (c) = step;
7386	      if (step != TREE_OPERAND (t, 1))
7387		{
7388		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7389					&for_pre_body, NULL,
7390					is_gimple_val, fb_rvalue);
7391		  ret = MIN (ret, tret);
7392		}
7393	    }
7394	  break;
7395
7396	default:
7397	  gcc_unreachable ();
7398	}
7399
7400      if (c2)
7401	{
7402	  gcc_assert (c);
7403	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
7404	}
7405
7406      if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7407	  && orig_for_stmt == for_stmt)
7408	{
7409	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7410	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7411		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7412		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7413		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7414		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7415		&& OMP_CLAUSE_DECL (c) == decl)
7416	      {
7417		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7418		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7419		gcc_assert (TREE_OPERAND (t, 0) == var);
7420		t = TREE_OPERAND (t, 1);
7421		gcc_assert (TREE_CODE (t) == PLUS_EXPR
7422			    || TREE_CODE (t) == MINUS_EXPR
7423			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
7424		gcc_assert (TREE_OPERAND (t, 0) == var);
7425		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7426			    TREE_OPERAND (t, 1));
7427		gimple_seq *seq;
7428		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7429		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7430		else
7431		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7432		gimplify_assign (decl, t, seq);
7433	    }
7434	}
7435    }
7436
7437  BITMAP_FREE (has_decl_expr);
7438
7439  gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
7440
7441  if (orig_for_stmt != for_stmt)
7442    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7443      {
7444	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7445	decl = TREE_OPERAND (t, 0);
7446	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7447	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7448	TREE_OPERAND (t, 0) = var;
7449	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7450	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7451	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7452      }
7453
7454  gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7455
7456  int kind;
7457  switch (TREE_CODE (orig_for_stmt))
7458    {
7459    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7460    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7461    case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7462    case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
7463    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7464    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
7465    default:
7466      gcc_unreachable ();
7467    }
7468  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7469			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7470			       for_pre_body);
7471  if (orig_for_stmt != for_stmt)
7472    gimple_omp_for_set_combined_p (gfor, true);
7473  if (gimplify_omp_ctxp
7474      && (gimplify_omp_ctxp->combined_loop
7475	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7476	      && gimplify_omp_ctxp->outer_context
7477	      && gimplify_omp_ctxp->outer_context->combined_loop)))
7478    {
7479      gimple_omp_for_set_combined_into_p (gfor, true);
7480      if (gimplify_omp_ctxp->combined_loop)
7481	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7482      else
7483	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7484    }
7485
7486  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7487    {
7488      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7489      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7490      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7491      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7492      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7493      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7494      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7495      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7496    }
7497
7498  gimplify_seq_add_stmt (pre_p, gfor);
7499  if (ret != GS_ALL_DONE)
7500    return GS_ERROR;
7501  *expr_p = NULL_TREE;
7502  return GS_ALL_DONE;
7503}
7504
/* Gimplify the gross structure of several OMP constructs: OMP_SECTIONS,
   OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA, OMP_TEAMS and the OpenACC
   counterparts OACC_KERNELS, OACC_PARALLEL and OACC_DATA.  The body of
   *EXPR_P is gimplified and the matching GIMPLE statement is appended
   to PRE_P; *EXPR_P is cleared on exit.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code of the construct to the kind of OMP region.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OACC_KERNELS:
    case OACC_PARALLEL:
    case OMP_TARGET:
      ort = ORT_TARGET;
      break;
    case OACC_DATA:
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  /* Scan the clauses before gimplifying the body so data-sharing
     decisions are in place; they are finalized afterwards by
     gimplify_adjust_omp_clauses below.  */
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
    {
      /* Target regions get a gimplification context of their own so
	 the body ends up wrapped in its own GIMPLE_BIND.  */
      push_gimplify_context ();
      gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if (ort == ORT_TARGET_DATA)
	{
	  /* Wrap the body in a GIMPLE_TRY_FINALLY whose cleanup calls
	     the runtime routine that ends the data region, so the
	     region is closed even on an abnormal exit.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));

  /* Build the GIMPLE statement corresponding to the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
7610
7611/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
7612   target update constructs.  */
7613
7614static void
7615gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7616{
7617  tree expr = *expr_p, clauses;
7618  int kind;
7619  gomp_target *stmt;
7620
7621  switch (TREE_CODE (expr))
7622    {
7623    case OACC_ENTER_DATA:
7624      clauses = OACC_ENTER_DATA_CLAUSES (expr);
7625      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7626      break;
7627    case OACC_EXIT_DATA:
7628      clauses = OACC_EXIT_DATA_CLAUSES (expr);
7629      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7630      break;
7631    case OACC_UPDATE:
7632      clauses = OACC_UPDATE_CLAUSES (expr);
7633      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
7634      break;
7635    case OMP_TARGET_UPDATE:
7636      clauses = OMP_TARGET_UPDATE_CLAUSES (expr);
7637      kind = GF_OMP_TARGET_KIND_UPDATE;
7638      break;
7639    default:
7640      gcc_unreachable ();
7641    }
7642  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_WORKSHARE);
7643  gimplify_adjust_omp_clauses (pre_p, &clauses);
7644  stmt = gimple_build_omp_target (NULL, kind, clauses);
7645
7646  gimplify_seq_add_stmt (pre_p, stmt);
7647  *expr_p = NULL_TREE;
7648}
7649
7650/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
7651   stabilized the lhs of the atomic operation as *ADDR.  Return true if
7652   EXPR is this stabilized form.  */
7653
7654static bool
7655goa_lhs_expr_p (tree expr, tree addr)
7656{
7657  /* Also include casts to other type variants.  The C front end is fond
7658     of adding these for e.g. volatile variables.  This is like
7659     STRIP_TYPE_NOPS but includes the main variant lookup.  */
7660  STRIP_USELESS_TYPE_CONVERSION (expr);
7661
7662  if (TREE_CODE (expr) == INDIRECT_REF)
7663    {
7664      expr = TREE_OPERAND (expr, 0);
7665      while (expr != addr
7666	     && (CONVERT_EXPR_P (expr)
7667		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7668	     && TREE_CODE (expr) == TREE_CODE (addr)
7669	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7670	{
7671	  expr = TREE_OPERAND (expr, 0);
7672	  addr = TREE_OPERAND (addr, 0);
7673	}
7674      if (expr == addr)
7675	return true;
7676      return (TREE_CODE (addr) == ADDR_EXPR
7677	      && TREE_CODE (expr) == ADDR_EXPR
7678	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7679    }
7680  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7681    return true;
7682  return false;
7683}
7684
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If the whole expression is the stabilized lhs, substitute the
     temporary directly.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* An expression already in gimple val form cannot contain the lhs.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into the operands, accumulating whether the lhs was seen.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary/comparison also have operand 0, handled by the
	 unary case below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: the binary truth ops share operand 0 handling with
	     TRUTH_NOT_EXPR.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs appeared nowhere below this point: evaluate the whole
     subexpression into a temporary outside the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
7755
/* Gimplify an OMP_ATOMIC statement.  The statement is lowered into a
   GIMPLE_OMP_ATOMIC_LOAD of *ADDR into a fresh temporary, followed by a
   GIMPLE_OMP_ATOMIC_STORE of the (stabilized) rhs.  For the capture
   variants *EXPR_P is replaced by the captured value; otherwise it is
   cleared.  Returns GS_ALL_DONE on success, GS_ERROR on failure.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs; every other variant carries one.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  tmp_load = create_tmp_reg (type);
  /* Replace references to the lhs inside RHS with the temporary that the
     atomic load will produce; evaluate the rest outside the region.  */
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* For a pure read the stored value is the loaded one.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* Propagate the seq_cst flag to both halves of the atomic pair.  */
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For the capture forms, mark which half produces the needed value
     and make it the result of the expression.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
7810
7811/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
7812   body, and adding some EH bits.  */
7813
7814static enum gimplify_status
7815gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7816{
7817  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7818  gimple body_stmt;
7819  gtransaction *trans_stmt;
7820  gimple_seq body = NULL;
7821  int subcode = 0;
7822
7823  /* Wrap the transaction body in a BIND_EXPR so we have a context
7824     where to put decls for OMP.  */
7825  if (TREE_CODE (tbody) != BIND_EXPR)
7826    {
7827      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7828      TREE_SIDE_EFFECTS (bind) = 1;
7829      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7830      TRANSACTION_EXPR_BODY (expr) = bind;
7831    }
7832
7833  push_gimplify_context ();
7834  temp = voidify_wrapper_expr (*expr_p, NULL);
7835
7836  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7837  pop_gimplify_context (body_stmt);
7838
7839  trans_stmt = gimple_build_transaction (body, NULL);
7840  if (TRANSACTION_EXPR_OUTER (expr))
7841    subcode = GTMA_IS_OUTER;
7842  else if (TRANSACTION_EXPR_RELAXED (expr))
7843    subcode = GTMA_IS_RELAXED;
7844  gimple_transaction_set_subcode (trans_stmt, subcode);
7845
7846  gimplify_seq_add_stmt (pre_p, trans_stmt);
7847
7848  if (temp)
7849    {
7850      *expr_p = temp;
7851      return GS_OK;
7852    }
7853
7854  *expr_p = NULL_TREE;
7855  return GS_ALL_DONE;
7856}
7857
7858/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
7859   expression produces a value to be used as an operand inside a GIMPLE
7860   statement, the value will be stored back in *EXPR_P.  This value will
7861   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7862   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
7863   emitted in PRE_P and POST_P.
7864
7865   Additionally, this process may overwrite parts of the input
7866   expression during gimplification.  Ideally, it should be
7867   possible to do non-destructive gimplification.
7868
7869   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
7870      the expression needs to evaluate to a value to be used as
7871      an operand in a GIMPLE statement, this value will be stored in
7872      *EXPR_P on exit.  This happens when the caller specifies one
7873      of fb_lvalue or fb_rvalue fallback flags.
7874
7875   PRE_P will contain the sequence of GIMPLE statements corresponding
7876       to the evaluation of EXPR and all the side-effects that must
7877       be executed before the main expression.  On exit, the last
7878       statement of PRE_P is the core statement being gimplified.  For
7879       instance, when gimplifying 'if (++a)' the last statement in
7880       PRE_P will be 'if (t.1)' where t.1 is the result of
7881       pre-incrementing 'a'.
7882
7883   POST_P will contain the sequence of GIMPLE statements corresponding
7884       to the evaluation of all the side-effects that must be executed
7885       after the main expression.  If this is NULL, the post
7886       side-effects are stored at the end of PRE_P.
7887
7888       The reason why the output is split in two is to handle post
7889       side-effects explicitly.  In some cases, an expression may have
7890       inner and outer post side-effects which need to be emitted in
7891       an order different from the one given by the recursive
7892       traversal.  For instance, for the expression (*p--)++ the post
7893       side-effects of '--' must actually occur *after* the post
7894       side-effects of '++'.  However, gimplification will first visit
7895       the inner expression, so if a separate POST sequence was not
7896       used, the resulting sequence would be:
7897
7898       	    1	t.1 = *p
7899       	    2	p = p - 1
7900       	    3	t.2 = t.1 + 1
7901       	    4	*p = t.2
7902
7903       However, the post-decrement operation in line #2 must not be
7904       evaluated until after the store to *p at line #4, so the
7905       correct sequence should be:
7906
7907       	    1	t.1 = *p
7908       	    2	t.2 = t.1 + 1
7909       	    3	*p = t.2
7910       	    4	p = p - 1
7911
7912       So, by specifying a separate post queue, it is possible
7913       to emit the post side-effects in the correct order.
7914       If POST_P is NULL, an internal queue will be used.  Before
7915       returning to the caller, the sequence POST_P is appended to
7916       the main output sequence PRE_P.
7917
7918   GIMPLE_TEST_F points to a function that takes a tree T and
7919       returns nonzero if T is in the GIMPLE form requested by the
7920       caller.  The GIMPLE predicates are in gimple.c.
7921
7922   FALLBACK tells the function what sort of a temporary we want if
7923       gimplification cannot produce an expression that complies with
7924       GIMPLE_TEST_F.
7925
7926       fb_none means that no temporary should be generated
7927       fb_rvalue means that an rvalue is OK to generate
7928       fb_lvalue means that an lvalue is OK to generate
7929       fb_either means that either is OK, but an lvalue is preferable.
7930       fb_mayfail means that gimplification may fail (in which case
7931       GS_ERROR will be returned)
7932
7933   The return value is either GS_ERROR or GS_ALL_DONE, since this
7934   function iterates until EXPR is completely gimplified or an error
7935   occurs.  */
7936
7937enum gimplify_status
7938gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7939	       bool (*gimple_test_f) (tree), fallback_t fallback)
7940{
7941  tree tmp;
7942  gimple_seq internal_pre = NULL;
7943  gimple_seq internal_post = NULL;
7944  tree save_expr;
7945  bool is_statement;
7946  location_t saved_location;
7947  enum gimplify_status ret;
7948  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7949
7950  save_expr = *expr_p;
7951  if (save_expr == NULL_TREE)
7952    return GS_ALL_DONE;
7953
7954  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
7955  is_statement = gimple_test_f == is_gimple_stmt;
7956  if (is_statement)
7957    gcc_assert (pre_p);
7958
7959  /* Consistency checks.  */
7960  if (gimple_test_f == is_gimple_reg)
7961    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7962  else if (gimple_test_f == is_gimple_val
7963           || gimple_test_f == is_gimple_call_addr
7964           || gimple_test_f == is_gimple_condexpr
7965           || gimple_test_f == is_gimple_mem_rhs
7966           || gimple_test_f == is_gimple_mem_rhs_or_call
7967           || gimple_test_f == is_gimple_reg_rhs
7968           || gimple_test_f == is_gimple_reg_rhs_or_call
7969           || gimple_test_f == is_gimple_asm_val
7970	   || gimple_test_f == is_gimple_mem_ref_addr)
7971    gcc_assert (fallback & fb_rvalue);
7972  else if (gimple_test_f == is_gimple_min_lval
7973	   || gimple_test_f == is_gimple_lvalue)
7974    gcc_assert (fallback & fb_lvalue);
7975  else if (gimple_test_f == is_gimple_addressable)
7976    gcc_assert (fallback & fb_either);
7977  else if (gimple_test_f == is_gimple_stmt)
7978    gcc_assert (fallback == fb_none);
7979  else
7980    {
7981      /* We should have recognized the GIMPLE_TEST_F predicate to
7982	 know what kind of fallback to use in case a temporary is
7983	 needed to hold the value or address of *EXPR_P.  */
7984      gcc_unreachable ();
7985    }
7986
7987  /* We used to check the predicate here and return immediately if it
7988     succeeds.  This is wrong; the design is for gimplification to be
7989     idempotent, and for the predicates to only test for valid forms, not
7990     whether they are fully simplified.  */
7991  if (pre_p == NULL)
7992    pre_p = &internal_pre;
7993
7994  if (post_p == NULL)
7995    post_p = &internal_post;
7996
7997  /* Remember the last statements added to PRE_P and POST_P.  Every
7998     new statement added by the gimplification helpers needs to be
7999     annotated with location information.  To centralize the
8000     responsibility, we remember the last statement that had been
8001     added to both queues before gimplifying *EXPR_P.  If
8002     gimplification produces new statements in PRE_P and POST_P, those
8003     statements will be annotated with the same location information
8004     as *EXPR_P.  */
8005  pre_last_gsi = gsi_last (*pre_p);
8006  post_last_gsi = gsi_last (*post_p);
8007
8008  saved_location = input_location;
8009  if (save_expr != error_mark_node
8010      && EXPR_HAS_LOCATION (*expr_p))
8011    input_location = EXPR_LOCATION (*expr_p);
8012
8013  /* Loop over the specific gimplifiers until the toplevel node
8014     remains the same.  */
8015  do
8016    {
8017      /* Strip away as many useless type conversions as possible
8018	 at the toplevel.  */
8019      STRIP_USELESS_TYPE_CONVERSION (*expr_p);
8020
8021      /* Remember the expr.  */
8022      save_expr = *expr_p;
8023
8024      /* Die, die, die, my darling.  */
8025      if (save_expr == error_mark_node
8026	  || (TREE_TYPE (save_expr)
8027	      && TREE_TYPE (save_expr) == error_mark_node))
8028	{
8029	  ret = GS_ERROR;
8030	  break;
8031	}
8032
8033      /* Do any language-specific gimplification.  */
8034      ret = ((enum gimplify_status)
8035	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
8036      if (ret == GS_OK)
8037	{
8038	  if (*expr_p == NULL_TREE)
8039	    break;
8040	  if (*expr_p != save_expr)
8041	    continue;
8042	}
8043      else if (ret != GS_UNHANDLED)
8044	break;
8045
8046      /* Make sure that all the cases set 'ret' appropriately.  */
8047      ret = GS_UNHANDLED;
8048      switch (TREE_CODE (*expr_p))
8049	{
8050	  /* First deal with the special cases.  */
8051
8052	case POSTINCREMENT_EXPR:
8053	case POSTDECREMENT_EXPR:
8054	case PREINCREMENT_EXPR:
8055	case PREDECREMENT_EXPR:
8056	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
8057					fallback != fb_none,
8058					TREE_TYPE (*expr_p));
8059	  break;
8060
8061	case VIEW_CONVERT_EXPR:
8062	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
8063	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
8064	    {
8065	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8066				   post_p, is_gimple_val, fb_rvalue);
8067	      recalculate_side_effects (*expr_p);
8068	      break;
8069	    }
8070	  /* Fallthru.  */
8071
8072	case ARRAY_REF:
8073	case ARRAY_RANGE_REF:
8074	case REALPART_EXPR:
8075	case IMAGPART_EXPR:
8076	case COMPONENT_REF:
8077	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
8078					fallback ? fallback : fb_rvalue);
8079	  break;
8080
8081	case COND_EXPR:
8082	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
8083
8084	  /* C99 code may assign to an array in a structure value of a
8085	     conditional expression, and this has undefined behavior
8086	     only on execution, so create a temporary if an lvalue is
8087	     required.  */
8088	  if (fallback == fb_lvalue)
8089	    {
8090	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8091	      mark_addressable (*expr_p);
8092	      ret = GS_OK;
8093	    }
8094	  break;
8095
8096	case CALL_EXPR:
8097	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
8098
8099	  /* C99 code may assign to an array in a structure returned
8100	     from a function, and this has undefined behavior only on
8101	     execution, so create a temporary if an lvalue is
8102	     required.  */
8103	  if (fallback == fb_lvalue)
8104	    {
8105	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8106	      mark_addressable (*expr_p);
8107	      ret = GS_OK;
8108	    }
8109	  break;
8110
8111	case TREE_LIST:
8112	  gcc_unreachable ();
8113
8114	case COMPOUND_EXPR:
8115	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
8116	  break;
8117
8118	case COMPOUND_LITERAL_EXPR:
8119	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
8120						gimple_test_f, fallback);
8121	  break;
8122
8123	case MODIFY_EXPR:
8124	case INIT_EXPR:
8125	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
8126				      fallback != fb_none);
8127	  break;
8128
8129	case TRUTH_ANDIF_EXPR:
8130	case TRUTH_ORIF_EXPR:
8131	  {
8132	    /* Preserve the original type of the expression and the
8133	       source location of the outer expression.  */
8134	    tree org_type = TREE_TYPE (*expr_p);
8135	    *expr_p = gimple_boolify (*expr_p);
8136	    *expr_p = build3_loc (input_location, COND_EXPR,
8137				  org_type, *expr_p,
8138				  fold_convert_loc
8139				    (input_location,
8140				     org_type, boolean_true_node),
8141				  fold_convert_loc
8142				    (input_location,
8143				     org_type, boolean_false_node));
8144	    ret = GS_OK;
8145	    break;
8146	  }
8147
8148	case TRUTH_NOT_EXPR:
8149	  {
8150	    tree type = TREE_TYPE (*expr_p);
8151	    /* The parsers are careful to generate TRUTH_NOT_EXPR
8152	       only with operands that are always zero or one.
8153	       We do not fold here but handle the only interesting case
8154	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
8155	    *expr_p = gimple_boolify (*expr_p);
8156	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
8157	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
8158				    TREE_TYPE (*expr_p),
8159				    TREE_OPERAND (*expr_p, 0));
8160	    else
8161	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
8162				    TREE_TYPE (*expr_p),
8163				    TREE_OPERAND (*expr_p, 0),
8164				    build_int_cst (TREE_TYPE (*expr_p), 1));
8165	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
8166	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
8167	    ret = GS_OK;
8168	    break;
8169	  }
8170
8171	case ADDR_EXPR:
8172	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
8173	  break;
8174
8175	case ANNOTATE_EXPR:
8176	  {
8177	    tree cond = TREE_OPERAND (*expr_p, 0);
8178	    tree kind = TREE_OPERAND (*expr_p, 1);
8179	    tree type = TREE_TYPE (cond);
8180	    if (!INTEGRAL_TYPE_P (type))
8181	      {
8182		*expr_p = cond;
8183		ret = GS_OK;
8184		break;
8185	      }
8186	    tree tmp = create_tmp_var (type);
8187	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
8188	    gcall *call
8189	      = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8190	    gimple_call_set_lhs (call, tmp);
8191	    gimplify_seq_add_stmt (pre_p, call);
8192	    *expr_p = tmp;
8193	    ret = GS_ALL_DONE;
8194	    break;
8195	  }
8196
8197	case VA_ARG_EXPR:
8198	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
8199	  break;
8200
8201	CASE_CONVERT:
8202	  if (IS_EMPTY_STMT (*expr_p))
8203	    {
8204	      ret = GS_ALL_DONE;
8205	      break;
8206	    }
8207
8208	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
8209	      || fallback == fb_none)
8210	    {
8211	      /* Just strip a conversion to void (or in void context) and
8212		 try again.  */
8213	      *expr_p = TREE_OPERAND (*expr_p, 0);
8214	      ret = GS_OK;
8215	      break;
8216	    }
8217
8218	  ret = gimplify_conversion (expr_p);
8219	  if (ret == GS_ERROR)
8220	    break;
8221	  if (*expr_p != save_expr)
8222	    break;
8223	  /* FALLTHRU */
8224
8225	case FIX_TRUNC_EXPR:
8226	  /* unary_expr: ... | '(' cast ')' val | ...  */
8227	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8228			       is_gimple_val, fb_rvalue);
8229	  recalculate_side_effects (*expr_p);
8230	  break;
8231
8232	case INDIRECT_REF:
8233	  {
8234	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
8235	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
8236	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
8237
8238	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
8239	    if (*expr_p != save_expr)
8240	      {
8241		ret = GS_OK;
8242		break;
8243	      }
8244
8245	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8246				 is_gimple_reg, fb_rvalue);
8247	    if (ret == GS_ERROR)
8248	      break;
8249
8250	    recalculate_side_effects (*expr_p);
8251	    *expr_p = fold_build2_loc (input_location, MEM_REF,
8252				       TREE_TYPE (*expr_p),
8253				       TREE_OPERAND (*expr_p, 0),
8254				       build_int_cst (saved_ptr_type, 0));
8255	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
8256	    TREE_THIS_NOTRAP (*expr_p) = notrap;
8257	    ret = GS_OK;
8258	    break;
8259	  }
8260
8261	/* We arrive here through the various re-gimplifcation paths.  */
8262	case MEM_REF:
8263	  /* First try re-folding the whole thing.  */
8264	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
8265			     TREE_OPERAND (*expr_p, 0),
8266			     TREE_OPERAND (*expr_p, 1));
8267	  if (tmp)
8268	    {
8269	      *expr_p = tmp;
8270	      recalculate_side_effects (*expr_p);
8271	      ret = GS_OK;
8272	      break;
8273	    }
8274	  /* Avoid re-gimplifying the address operand if it is already
8275	     in suitable form.  Re-gimplifying would mark the address
8276	     operand addressable.  Always gimplify when not in SSA form
8277	     as we still may have to gimplify decls with value-exprs.  */
8278	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
8279	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
8280	    {
8281	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8282				   is_gimple_mem_ref_addr, fb_rvalue);
8283	      if (ret == GS_ERROR)
8284		break;
8285	    }
8286	  recalculate_side_effects (*expr_p);
8287	  ret = GS_ALL_DONE;
8288	  break;
8289
8290	/* Constants need not be gimplified.  */
8291	case INTEGER_CST:
8292	case REAL_CST:
8293	case FIXED_CST:
8294	case STRING_CST:
8295	case COMPLEX_CST:
8296	case VECTOR_CST:
8297	  /* Drop the overflow flag on constants, we do not want
8298	     that in the GIMPLE IL.  */
8299	  if (TREE_OVERFLOW_P (*expr_p))
8300	    *expr_p = drop_tree_overflow (*expr_p);
8301	  ret = GS_ALL_DONE;
8302	  break;
8303
8304	case CONST_DECL:
8305	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
8306	     CONST_DECL node.  Otherwise the decl is replaceable by its
8307	     value.  */
8308	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
8309	  if (fallback & fb_lvalue)
8310	    ret = GS_ALL_DONE;
8311	  else
8312	    {
8313	      *expr_p = DECL_INITIAL (*expr_p);
8314	      ret = GS_OK;
8315	    }
8316	  break;
8317
8318	case DECL_EXPR:
8319	  ret = gimplify_decl_expr (expr_p, pre_p);
8320	  break;
8321
8322	case BIND_EXPR:
8323	  ret = gimplify_bind_expr (expr_p, pre_p);
8324	  break;
8325
8326	case LOOP_EXPR:
8327	  ret = gimplify_loop_expr (expr_p, pre_p);
8328	  break;
8329
8330	case SWITCH_EXPR:
8331	  ret = gimplify_switch_expr (expr_p, pre_p);
8332	  break;
8333
8334	case EXIT_EXPR:
8335	  ret = gimplify_exit_expr (expr_p);
8336	  break;
8337
8338	case GOTO_EXPR:
8339	  /* If the target is not LABEL, then it is a computed jump
8340	     and the target needs to be gimplified.  */
8341	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8342	    {
8343	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
8344				   NULL, is_gimple_val, fb_rvalue);
8345	      if (ret == GS_ERROR)
8346		break;
8347	    }
8348	  gimplify_seq_add_stmt (pre_p,
8349			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
8350	  ret = GS_ALL_DONE;
8351	  break;
8352
8353	case PREDICT_EXPR:
8354	  gimplify_seq_add_stmt (pre_p,
8355			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
8356					      PREDICT_EXPR_OUTCOME (*expr_p)));
8357	  ret = GS_ALL_DONE;
8358	  break;
8359
8360	case LABEL_EXPR:
8361	  ret = GS_ALL_DONE;
8362	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
8363		      == current_function_decl);
8364	  gimplify_seq_add_stmt (pre_p,
8365			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
8366	  break;
8367
8368	case CASE_LABEL_EXPR:
8369	  ret = gimplify_case_label_expr (expr_p, pre_p);
8370	  break;
8371
8372	case RETURN_EXPR:
8373	  ret = gimplify_return_expr (*expr_p, pre_p);
8374	  break;
8375
8376	case CONSTRUCTOR:
8377	  /* Don't reduce this in place; let gimplify_init_constructor work its
     magic.  But if we're just elaborating this for side effects, just
8379	     gimplify any element that has side-effects.  */
8380	  if (fallback == fb_none)
8381	    {
8382	      unsigned HOST_WIDE_INT ix;
8383	      tree val;
8384	      tree temp = NULL_TREE;
8385	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
8386		if (TREE_SIDE_EFFECTS (val))
8387		  append_to_statement_list (val, &temp);
8388
8389	      *expr_p = temp;
8390	      ret = temp ? GS_OK : GS_ALL_DONE;
8391	    }
8392	  /* C99 code may assign to an array in a constructed
8393	     structure or union, and this has undefined behavior only
8394	     on execution, so create a temporary if an lvalue is
8395	     required.  */
8396	  else if (fallback == fb_lvalue)
8397	    {
8398	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8399	      mark_addressable (*expr_p);
8400	      ret = GS_OK;
8401	    }
8402	  else
8403	    ret = GS_ALL_DONE;
8404	  break;
8405
8406	  /* The following are special cases that are not handled by the
8407	     original GIMPLE grammar.  */
8408
8409	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
8410	     eliminated.  */
8411	case SAVE_EXPR:
8412	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
8413	  break;
8414
8415	case BIT_FIELD_REF:
8416	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8417			       post_p, is_gimple_lvalue, fb_either);
8418	  recalculate_side_effects (*expr_p);
8419	  break;
8420
8421	case TARGET_MEM_REF:
8422	  {
8423	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
8424
8425	    if (TMR_BASE (*expr_p))
8426	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
8427				  post_p, is_gimple_mem_ref_addr, fb_either);
8428	    if (TMR_INDEX (*expr_p))
8429	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
8430				  post_p, is_gimple_val, fb_rvalue);
8431	    if (TMR_INDEX2 (*expr_p))
8432	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
8433				  post_p, is_gimple_val, fb_rvalue);
8434	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
8435	    ret = MIN (r0, r1);
8436	  }
8437	  break;
8438
8439	case NON_LVALUE_EXPR:
8440	  /* This should have been stripped above.  */
8441	  gcc_unreachable ();
8442
8443	case ASM_EXPR:
8444	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8445	  break;
8446
8447	case TRY_FINALLY_EXPR:
8448	case TRY_CATCH_EXPR:
8449	  {
8450	    gimple_seq eval, cleanup;
8451	    gtry *try_;
8452
8453	    /* Calls to destructors are generated automatically in FINALLY/CATCH
8454	       block. They should have location as UNKNOWN_LOCATION. However,
8455	       gimplify_call_expr will reset these call stmts to input_location
8456	       if it finds stmt's location is unknown. To prevent resetting for
8457	       destructors, we set the input_location to unknown.
8458	       Note that this only affects the destructor calls in FINALLY/CATCH
8459	       block, and will automatically reset to its original value by the
8460	       end of gimplify_expr.  */
8461	    input_location = UNKNOWN_LOCATION;
8462	    eval = cleanup = NULL;
8463	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8464	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
8465	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
8466	    if (gimple_seq_empty_p (cleanup))
8467	      {
8468		gimple_seq_add_seq (pre_p, eval);
8469		ret = GS_ALL_DONE;
8470		break;
8471	      }
8472	    try_ = gimple_build_try (eval, cleanup,
8473				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8474				     ? GIMPLE_TRY_FINALLY
8475				     : GIMPLE_TRY_CATCH);
8476	    if (EXPR_HAS_LOCATION (save_expr))
8477	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
8478	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8479	      gimple_set_location (try_, saved_location);
8480	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8481	      gimple_try_set_catch_is_cleanup (try_,
8482					       TRY_CATCH_IS_CLEANUP (*expr_p));
8483	    gimplify_seq_add_stmt (pre_p, try_);
8484	    ret = GS_ALL_DONE;
8485	    break;
8486	  }
8487
8488	case CLEANUP_POINT_EXPR:
8489	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8490	  break;
8491
8492	case TARGET_EXPR:
8493	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
8494	  break;
8495
8496	case CATCH_EXPR:
8497	  {
8498	    gimple c;
8499	    gimple_seq handler = NULL;
8500	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8501	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8502	    gimplify_seq_add_stmt (pre_p, c);
8503	    ret = GS_ALL_DONE;
8504	    break;
8505	  }
8506
8507	case EH_FILTER_EXPR:
8508	  {
8509	    gimple ehf;
8510	    gimple_seq failure = NULL;
8511
8512	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8513	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8514	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8515	    gimplify_seq_add_stmt (pre_p, ehf);
8516	    ret = GS_ALL_DONE;
8517	    break;
8518	  }
8519
8520	case OBJ_TYPE_REF:
8521	  {
8522	    enum gimplify_status r0, r1;
8523	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8524				post_p, is_gimple_val, fb_rvalue);
8525	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8526				post_p, is_gimple_val, fb_rvalue);
8527	    TREE_SIDE_EFFECTS (*expr_p) = 0;
8528	    ret = MIN (r0, r1);
8529	  }
8530	  break;
8531
8532	case LABEL_DECL:
8533	  /* We get here when taking the address of a label.  We mark
8534	     the label as "forced"; meaning it can never be removed and
8535	     it is a potential target for any computed goto.  */
8536	  FORCED_LABEL (*expr_p) = 1;
8537	  ret = GS_ALL_DONE;
8538	  break;
8539
8540	case STATEMENT_LIST:
8541	  ret = gimplify_statement_list (expr_p, pre_p);
8542	  break;
8543
8544	case WITH_SIZE_EXPR:
8545	  {
8546	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8547			   post_p == &internal_post ? NULL : post_p,
8548			   gimple_test_f, fallback);
8549	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8550			   is_gimple_val, fb_rvalue);
8551	    ret = GS_ALL_DONE;
8552	  }
8553	  break;
8554
8555	case VAR_DECL:
8556	case PARM_DECL:
8557	  ret = gimplify_var_or_parm_decl (expr_p);
8558	  break;
8559
8560	case RESULT_DECL:
8561	  /* When within an OMP context, notice uses of variables.  */
8562	  if (gimplify_omp_ctxp)
8563	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8564	  ret = GS_ALL_DONE;
8565	  break;
8566
8567	case SSA_NAME:
8568	  /* Allow callbacks into the gimplifier during optimization.  */
8569	  ret = GS_ALL_DONE;
8570	  break;
8571
8572	case OMP_PARALLEL:
8573	  gimplify_omp_parallel (expr_p, pre_p);
8574	  ret = GS_ALL_DONE;
8575	  break;
8576
8577	case OMP_TASK:
8578	  gimplify_omp_task (expr_p, pre_p);
8579	  ret = GS_ALL_DONE;
8580	  break;
8581
8582	case OMP_FOR:
8583	case OMP_SIMD:
8584	case CILK_SIMD:
8585	case CILK_FOR:
8586	case OMP_DISTRIBUTE:
8587	case OACC_LOOP:
8588	  ret = gimplify_omp_for (expr_p, pre_p);
8589	  break;
8590
8591	case OACC_CACHE:
8592	  gimplify_oacc_cache (expr_p, pre_p);
8593	  ret = GS_ALL_DONE;
8594	  break;
8595
8596	case OACC_HOST_DATA:
8597	case OACC_DECLARE:
8598	  sorry ("directive not yet implemented");
8599	  ret = GS_ALL_DONE;
8600	  break;
8601
8602	case OACC_KERNELS:
8603	  if (OACC_KERNELS_COMBINED (*expr_p))
8604	    sorry ("directive not yet implemented");
8605	  else
8606	    gimplify_omp_workshare (expr_p, pre_p);
8607	  ret = GS_ALL_DONE;
8608	  break;
8609
8610	case OACC_PARALLEL:
8611	  if (OACC_PARALLEL_COMBINED (*expr_p))
8612	    sorry ("directive not yet implemented");
8613	  else
8614	    gimplify_omp_workshare (expr_p, pre_p);
8615	  ret = GS_ALL_DONE;
8616	  break;
8617
8618	case OACC_DATA:
8619	case OMP_SECTIONS:
8620	case OMP_SINGLE:
8621	case OMP_TARGET:
8622	case OMP_TARGET_DATA:
8623	case OMP_TEAMS:
8624	  gimplify_omp_workshare (expr_p, pre_p);
8625	  ret = GS_ALL_DONE;
8626	  break;
8627
8628	case OACC_ENTER_DATA:
8629	case OACC_EXIT_DATA:
8630	case OACC_UPDATE:
8631	case OMP_TARGET_UPDATE:
8632	  gimplify_omp_target_update (expr_p, pre_p);
8633	  ret = GS_ALL_DONE;
8634	  break;
8635
8636	case OMP_SECTION:
8637	case OMP_MASTER:
8638	case OMP_TASKGROUP:
8639	case OMP_ORDERED:
8640	case OMP_CRITICAL:
8641	  {
8642	    gimple_seq body = NULL;
8643	    gimple g;
8644
8645	    gimplify_and_add (OMP_BODY (*expr_p), &body);
8646	    switch (TREE_CODE (*expr_p))
8647	      {
8648	      case OMP_SECTION:
8649	        g = gimple_build_omp_section (body);
8650	        break;
8651	      case OMP_MASTER:
8652	        g = gimple_build_omp_master (body);
8653		break;
8654	      case OMP_TASKGROUP:
8655		{
8656		  gimple_seq cleanup = NULL;
8657		  tree fn
8658		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8659		  g = gimple_build_call (fn, 0);
8660		  gimple_seq_add_stmt (&cleanup, g);
8661		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8662		  body = NULL;
8663		  gimple_seq_add_stmt (&body, g);
8664		  g = gimple_build_omp_taskgroup (body);
8665		}
8666		break;
8667	      case OMP_ORDERED:
8668		g = gimple_build_omp_ordered (body);
8669		break;
8670	      case OMP_CRITICAL:
8671		g = gimple_build_omp_critical (body,
8672		    			       OMP_CRITICAL_NAME (*expr_p));
8673		break;
8674	      default:
8675		gcc_unreachable ();
8676	      }
8677	    gimplify_seq_add_stmt (pre_p, g);
8678	    ret = GS_ALL_DONE;
8679	    break;
8680	  }
8681
8682	case OMP_ATOMIC:
8683	case OMP_ATOMIC_READ:
8684	case OMP_ATOMIC_CAPTURE_OLD:
8685	case OMP_ATOMIC_CAPTURE_NEW:
8686	  ret = gimplify_omp_atomic (expr_p, pre_p);
8687	  break;
8688
8689	case TRANSACTION_EXPR:
8690	  ret = gimplify_transaction (expr_p, pre_p);
8691	  break;
8692
8693	case TRUTH_AND_EXPR:
8694	case TRUTH_OR_EXPR:
8695	case TRUTH_XOR_EXPR:
8696	  {
8697	    tree orig_type = TREE_TYPE (*expr_p);
8698	    tree new_type, xop0, xop1;
8699	    *expr_p = gimple_boolify (*expr_p);
8700	    new_type = TREE_TYPE (*expr_p);
8701	    if (!useless_type_conversion_p (orig_type, new_type))
8702	      {
8703		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8704		ret = GS_OK;
8705		break;
8706	      }
8707
8708	  /* Boolified binary truth expressions are semantically equivalent
8709	     to bitwise binary expressions.  Canonicalize them to the
8710	     bitwise variant.  */
8711	    switch (TREE_CODE (*expr_p))
8712	      {
8713	      case TRUTH_AND_EXPR:
8714		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8715		break;
8716	      case TRUTH_OR_EXPR:
8717		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8718		break;
8719	      case TRUTH_XOR_EXPR:
8720		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8721		break;
8722	      default:
8723		break;
8724	      }
8725	    /* Now make sure that operands have compatible type to
8726	       expression's new_type.  */
8727	    xop0 = TREE_OPERAND (*expr_p, 0);
8728	    xop1 = TREE_OPERAND (*expr_p, 1);
8729	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8730	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8731							    new_type,
8732	      						    xop0);
8733	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8734	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8735							    new_type,
8736	      						    xop1);
8737	    /* Continue classified as tcc_binary.  */
8738	    goto expr_2;
8739	  }
8740
8741	case FMA_EXPR:
8742	case VEC_COND_EXPR:
8743	case VEC_PERM_EXPR:
8744	  /* Classified as tcc_expression.  */
8745	  goto expr_3;
8746
8747	case POINTER_PLUS_EXPR:
8748	  {
8749	    enum gimplify_status r0, r1;
8750	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8751				post_p, is_gimple_val, fb_rvalue);
8752	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8753				post_p, is_gimple_val, fb_rvalue);
8754	    recalculate_side_effects (*expr_p);
8755	    ret = MIN (r0, r1);
8756	    break;
8757	  }
8758
8759	case CILK_SYNC_STMT:
8760	  {
8761	    if (!fn_contains_cilk_spawn_p (cfun))
8762	      {
8763		error_at (EXPR_LOCATION (*expr_p),
8764			  "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8765		ret = GS_ERROR;
8766	      }
8767	    else
8768	      {
8769		gimplify_cilk_sync (expr_p, pre_p);
8770		ret = GS_ALL_DONE;
8771	      }
8772	    break;
8773	  }
8774
8775	default:
8776	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8777	    {
8778	    case tcc_comparison:
8779	      /* Handle comparison of objects of non scalar mode aggregates
8780	     	 with a call to memcmp.  It would be nice to only have to do
8781	     	 this for variable-sized objects, but then we'd have to allow
8782	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
8783	     	 that's too complex.
8784
8785		 Compare scalar mode aggregates as scalar mode values.  Using
8786		 memcmp for them would be very inefficient at best, and is
8787		 plain wrong if bitfields are involved.  */
8788		{
8789		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8790
8791		  /* Vector comparisons need no boolification.  */
8792		  if (TREE_CODE (type) == VECTOR_TYPE)
8793		    goto expr_2;
8794		  else if (!AGGREGATE_TYPE_P (type))
8795		    {
8796		      tree org_type = TREE_TYPE (*expr_p);
8797		      *expr_p = gimple_boolify (*expr_p);
8798		      if (!useless_type_conversion_p (org_type,
8799						      TREE_TYPE (*expr_p)))
8800			{
8801			  *expr_p = fold_convert_loc (input_location,
8802						      org_type, *expr_p);
8803			  ret = GS_OK;
8804			}
8805		      else
8806			goto expr_2;
8807		    }
8808		  else if (TYPE_MODE (type) != BLKmode)
8809		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8810		  else
8811		    ret = gimplify_variable_sized_compare (expr_p);
8812
8813		  break;
8814		}
8815
8816	    /* If *EXPR_P does not need to be special-cased, handle it
8817	       according to its class.  */
8818	    case tcc_unary:
8819	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8820				   post_p, is_gimple_val, fb_rvalue);
8821	      break;
8822
8823	    case tcc_binary:
8824	    expr_2:
8825	      {
8826		enum gimplify_status r0, r1;
8827
8828		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8829		                    post_p, is_gimple_val, fb_rvalue);
8830		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8831				    post_p, is_gimple_val, fb_rvalue);
8832
8833		ret = MIN (r0, r1);
8834		break;
8835	      }
8836
8837	    expr_3:
8838	      {
8839		enum gimplify_status r0, r1, r2;
8840
8841		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8842		                    post_p, is_gimple_val, fb_rvalue);
8843		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8844				    post_p, is_gimple_val, fb_rvalue);
8845		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8846				    post_p, is_gimple_val, fb_rvalue);
8847
8848		ret = MIN (MIN (r0, r1), r2);
8849		break;
8850	      }
8851
8852	    case tcc_declaration:
8853	    case tcc_constant:
8854	      ret = GS_ALL_DONE;
8855	      goto dont_recalculate;
8856
8857	    default:
8858	      gcc_unreachable ();
8859	    }
8860
8861	  recalculate_side_effects (*expr_p);
8862
8863	dont_recalculate:
8864	  break;
8865	}
8866
8867      gcc_assert (*expr_p || ret != GS_OK);
8868    }
8869  while (ret == GS_OK);
8870
8871  /* If we encountered an error_mark somewhere nested inside, either
8872     stub out the statement or propagate the error back out.  */
8873  if (ret == GS_ERROR)
8874    {
8875      if (is_statement)
8876	*expr_p = NULL;
8877      goto out;
8878    }
8879
8880  /* This was only valid as a return value from the langhook, which
8881     we handled.  Make sure it doesn't escape from any other context.  */
8882  gcc_assert (ret != GS_UNHANDLED);
8883
8884  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8885    {
8886      /* We aren't looking for a value, and we don't have a valid
8887	 statement.  If it doesn't have side-effects, throw it away.  */
8888      if (!TREE_SIDE_EFFECTS (*expr_p))
8889	*expr_p = NULL;
8890      else if (!TREE_THIS_VOLATILE (*expr_p))
8891	{
8892	  /* This is probably a _REF that contains something nested that
8893	     has side effects.  Recurse through the operands to find it.  */
8894	  enum tree_code code = TREE_CODE (*expr_p);
8895
8896	  switch (code)
8897	    {
8898	    case COMPONENT_REF:
8899	    case REALPART_EXPR:
8900	    case IMAGPART_EXPR:
8901	    case VIEW_CONVERT_EXPR:
8902	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8903			     gimple_test_f, fallback);
8904	      break;
8905
8906	    case ARRAY_REF:
8907	    case ARRAY_RANGE_REF:
8908	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8909			     gimple_test_f, fallback);
8910	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8911			     gimple_test_f, fallback);
8912	      break;
8913
8914	    default:
8915	       /* Anything else with side-effects must be converted to
8916		  a valid statement before we get here.  */
8917	      gcc_unreachable ();
8918	    }
8919
8920	  *expr_p = NULL;
8921	}
8922      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8923	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8924	{
8925	  /* Historically, the compiler has treated a bare reference
8926	     to a non-BLKmode volatile lvalue as forcing a load.  */
8927	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8928
8929	  /* Normally, we do not want to create a temporary for a
8930	     TREE_ADDRESSABLE type because such a type should not be
8931	     copied by bitwise-assignment.  However, we make an
8932	     exception here, as all we are doing here is ensuring that
8933	     we read the bytes that make up the type.  We use
8934	     create_tmp_var_raw because create_tmp_var will abort when
8935	     given a TREE_ADDRESSABLE type.  */
8936	  tree tmp = create_tmp_var_raw (type, "vol");
8937	  gimple_add_tmp_var (tmp);
8938	  gimplify_assign (tmp, *expr_p, pre_p);
8939	  *expr_p = NULL;
8940	}
8941      else
8942	/* We can't do anything useful with a volatile reference to
8943	   an incomplete type, so just throw it away.  Likewise for
8944	   a BLKmode type, since any implicit inner load should
8945	   already have been turned into an explicit one by the
8946	   gimplification process.  */
8947	*expr_p = NULL;
8948    }
8949
8950  /* If we are gimplifying at the statement level, we're done.  Tack
8951     everything together and return.  */
8952  if (fallback == fb_none || is_statement)
8953    {
8954      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8955         it out for GC to reclaim it.  */
8956      *expr_p = NULL_TREE;
8957
8958      if (!gimple_seq_empty_p (internal_pre)
8959	  || !gimple_seq_empty_p (internal_post))
8960	{
8961	  gimplify_seq_add_seq (&internal_pre, internal_post);
8962	  gimplify_seq_add_seq (pre_p, internal_pre);
8963	}
8964
8965      /* The result of gimplifying *EXPR_P is going to be the last few
8966	 statements in *PRE_P and *POST_P.  Add location information
8967	 to all the statements that were added by the gimplification
8968	 helpers.  */
8969      if (!gimple_seq_empty_p (*pre_p))
8970	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8971
8972      if (!gimple_seq_empty_p (*post_p))
8973	annotate_all_with_location_after (*post_p, post_last_gsi,
8974					  input_location);
8975
8976      goto out;
8977    }
8978
8979#ifdef ENABLE_GIMPLE_CHECKING
8980  if (*expr_p)
8981    {
8982      enum tree_code code = TREE_CODE (*expr_p);
8983      /* These expressions should already be in gimple IR form.  */
8984      gcc_assert (code != MODIFY_EXPR
8985		  && code != ASM_EXPR
8986		  && code != BIND_EXPR
8987		  && code != CATCH_EXPR
8988		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8989		  && code != EH_FILTER_EXPR
8990		  && code != GOTO_EXPR
8991		  && code != LABEL_EXPR
8992		  && code != LOOP_EXPR
8993		  && code != SWITCH_EXPR
8994		  && code != TRY_FINALLY_EXPR
8995		  && code != OACC_PARALLEL
8996		  && code != OACC_KERNELS
8997		  && code != OACC_DATA
8998		  && code != OACC_HOST_DATA
8999		  && code != OACC_DECLARE
9000		  && code != OACC_UPDATE
9001		  && code != OACC_ENTER_DATA
9002		  && code != OACC_EXIT_DATA
9003		  && code != OACC_CACHE
9004		  && code != OMP_CRITICAL
9005		  && code != OMP_FOR
9006		  && code != OACC_LOOP
9007		  && code != OMP_MASTER
9008		  && code != OMP_TASKGROUP
9009		  && code != OMP_ORDERED
9010		  && code != OMP_PARALLEL
9011		  && code != OMP_SECTIONS
9012		  && code != OMP_SECTION
9013		  && code != OMP_SINGLE);
9014    }
9015#endif
9016
9017  /* Otherwise we're gimplifying a subexpression, so the resulting
9018     value is interesting.  If it's a valid operand that matches
9019     GIMPLE_TEST_F, we're done. Unless we are handling some
9020     post-effects internally; if that's the case, we need to copy into
9021     a temporary before adding the post-effects to POST_P.  */
9022  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
9023    goto out;
9024
9025  /* Otherwise, we need to create a new temporary for the gimplified
9026     expression.  */
9027
9028  /* We can't return an lvalue if we have an internal postqueue.  The
9029     object the lvalue refers to would (probably) be modified by the
9030     postqueue; we need to copy the value out first, which means an
9031     rvalue.  */
9032  if ((fallback & fb_lvalue)
9033      && gimple_seq_empty_p (internal_post)
9034      && is_gimple_addressable (*expr_p))
9035    {
9036      /* An lvalue will do.  Take the address of the expression, store it
9037	 in a temporary, and replace the expression with an INDIRECT_REF of
9038	 that temporary.  */
9039      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
9040      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
9041      *expr_p = build_simple_mem_ref (tmp);
9042    }
9043  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
9044    {
9045      /* An rvalue will do.  Assign the gimplified expression into a
9046	 new temporary TMP and replace the original expression with
9047	 TMP.  First, make sure that the expression has a type so that
9048	 it can be assigned into a temporary.  */
9049      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
9050      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
9051    }
9052  else
9053    {
9054#ifdef ENABLE_GIMPLE_CHECKING
9055      if (!(fallback & fb_mayfail))
9056	{
9057	  fprintf (stderr, "gimplification failed:\n");
9058	  print_generic_expr (stderr, *expr_p, 0);
9059	  debug_tree (*expr_p);
9060	  internal_error ("gimplification failed");
9061	}
9062#endif
9063      gcc_assert (fallback & fb_mayfail);
9064
9065      /* If this is an asm statement, and the user asked for the
9066	 impossible, don't die.  Fail and let gimplify_asm_expr
9067	 issue an error.  */
9068      ret = GS_ERROR;
9069      goto out;
9070    }
9071
9072  /* Make sure the temporary matches our predicate.  */
9073  gcc_assert ((*gimple_test_f) (*expr_p));
9074
9075  if (!gimple_seq_empty_p (internal_post))
9076    {
9077      annotate_all_with_location (internal_post, input_location);
9078      gimplify_seq_add_seq (pre_p, internal_post);
9079    }
9080
9081 out:
9082  input_location = saved_location;
9083  return ret;
9084}
9085
9086/* Look through TYPE for variable-sized objects and gimplify each such
9087   size that we find.  Add to LIST_P any statements generated.  */
9088
9089void
9090gimplify_type_sizes (tree type, gimple_seq *list_p)
9091{
9092  tree field, t;
9093
9094  if (type == NULL || type == error_mark_node)
9095    return;
9096
9097  /* We first do the main variant, then copy into any other variants.  */
9098  type = TYPE_MAIN_VARIANT (type);
9099
9100  /* Avoid infinite recursion.  */
9101  if (TYPE_SIZES_GIMPLIFIED (type))
9102    return;
9103
9104  TYPE_SIZES_GIMPLIFIED (type) = 1;
9105
9106  switch (TREE_CODE (type))
9107    {
9108    case INTEGER_TYPE:
9109    case ENUMERAL_TYPE:
9110    case BOOLEAN_TYPE:
9111    case REAL_TYPE:
9112    case FIXED_POINT_TYPE:
9113      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
9114      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
9115
9116      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
9117	{
9118	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
9119	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
9120	}
9121      break;
9122
9123    case ARRAY_TYPE:
9124      /* These types may not have declarations, so handle them here.  */
9125      gimplify_type_sizes (TREE_TYPE (type), list_p);
9126      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
9127      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
9128	 with assigned stack slots, for -O1+ -g they should be tracked
9129	 by VTA.  */
9130      if (!(TYPE_NAME (type)
9131	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
9132	    && DECL_IGNORED_P (TYPE_NAME (type)))
9133	  && TYPE_DOMAIN (type)
9134	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
9135	{
9136	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
9137	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
9138	    DECL_IGNORED_P (t) = 0;
9139	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9140	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
9141	    DECL_IGNORED_P (t) = 0;
9142	}
9143      break;
9144
9145    case RECORD_TYPE:
9146    case UNION_TYPE:
9147    case QUAL_UNION_TYPE:
9148      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9149	if (TREE_CODE (field) == FIELD_DECL)
9150	  {
9151	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9152	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
9153	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
9154	    gimplify_type_sizes (TREE_TYPE (field), list_p);
9155	  }
9156      break;
9157
9158    case POINTER_TYPE:
9159    case REFERENCE_TYPE:
9160	/* We used to recurse on the pointed-to type here, which turned out to
9161	   be incorrect because its definition might refer to variables not
9162	   yet initialized at this point if a forward declaration is involved.
9163
9164	   It was actually useful for anonymous pointed-to types to ensure
9165	   that the sizes evaluation dominates every possible later use of the
9166	   values.  Restricting to such types here would be safe since there
9167	   is no possible forward declaration around, but would introduce an
9168	   undesirable middle-end semantic to anonymity.  We then defer to
9169	   front-ends the responsibility of ensuring that the sizes are
9170	   evaluated both early and late enough, e.g. by attaching artificial
9171	   type declarations to the tree.  */
9172      break;
9173
9174    default:
9175      break;
9176    }
9177
9178  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
9179  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
9180
9181  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
9182    {
9183      TYPE_SIZE (t) = TYPE_SIZE (type);
9184      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
9185      TYPE_SIZES_GIMPLIFIED (t) = 1;
9186    }
9187}
9188
9189/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
9190   a size or position, has had all of its SAVE_EXPRs evaluated.
9191   We add any required statements to *STMT_P.  */
9192
9193void
9194gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
9195{
9196  tree expr = *expr_p;
9197
9198  /* We don't do anything if the value isn't there, is constant, or contains
9199     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
9200     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
9201     will want to replace it with a new variable, but that will cause problems
9202     if this type is from outside the function.  It's OK to have that here.  */
9203  if (is_gimple_sizepos (expr))
9204    return;
9205
9206  *expr_p = unshare_expr (expr);
9207
9208  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
9209}
9210
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* No gimplification context may be left over from a previous body;
     open a fresh one, paired with the pop_gimplify_context below.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Gimplify an "omp declare target" function inside a target OMP
	 context so variable uses in the body are noticed as such.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* The nonlocal-VLA set is only needed when FNDECL is itself a nested
     function, i.e. its cgraph node has an origin.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An entirely empty body still needs one statement: use a no-op.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been fully converted; clear it so GC can
     reclaim the trees.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Flush any nonlocal VLA variables collected during gimplification
     into the appropriate scope, then discard the set.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down the OMP context if one was opened above or during
     gimplification of the body.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  /* Restore the location saved on entry.  */
  input_location = saved_location;

  return outer_bind;
}
9337
9338typedef char *char_p; /* For DEF_VEC_P.  */
9339
9340/* Return whether we should exclude FNDECL from instrumentation.  */
9341
9342static bool
9343flag_instrument_functions_exclude_p (tree fndecl)
9344{
9345  vec<char_p> *v;
9346
9347  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
9348  if (v && v->length () > 0)
9349    {
9350      const char *name;
9351      int i;
9352      char *s;
9353
9354      name = lang_hooks.decl_printable_name (fndecl, 0);
9355      FOR_EACH_VEC_ELT (*v, i, s)
9356	if (strstr (name, s) != NULL)
9357	  return true;
9358    }
9359
9360  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
9361  if (v && v->length () > 0)
9362    {
9363      const char *name;
9364      int i;
9365      char *s;
9366
9367      name = DECL_SOURCE_FILE (fndecl);
9368      FOR_EACH_VEC_ELT (*v, i, s)
9369	if (strstr (name, s) != NULL)
9370	  return true;
9371    }
9372
9373  return false;
9374}
9375
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting GIMPLE body is installed on FNDECL with
   gimple_set_body; the GENERIC body in DECL_SAVED_TREE is cleared.
   This function returns nothing.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Enter FNDECL's function context so that gimplification helpers
     (e.g. create_tmp_var) allocate into the right struct function.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl, except that volatility is not
     checked there.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Lower the GENERIC body into a single outer GIMPLE_BIND.  */
  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the exit hook first; it forms the cleanup half of the
	 try/finally: the profiling exit function is called with the
	 address of the current function and its return address.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Now build the entry hook, which runs before the original body
	 (the try/finally TF is appended after it below).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* Likewise, under ThreadSanitizer wrap the body in a try/finally
     whose cleanup calls the internal TSAN_FUNC_EXIT hook, unless the
     function opts out via the no_sanitize_thread attribute.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body is dead now; the GIMPLE body is authoritative.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
9494
9495/* Return a dummy expression of type TYPE in order to keep going after an
9496   error.  */
9497
9498static tree
9499dummy_object (tree type)
9500{
9501  tree t = build_int_cst (build_pointer_type (type), 0);
9502  return build2 (MEM_REF, type, t, t);
9503}
9504
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to lower; side-effect statements are
   appended to PRE_P/POST_P.  Returns GS_ERROR for an invalid va_list
   argument, otherwise GS_ALL_DONE or GS_OK.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  /* A NULL canonical type means the argument is not a va_list at all.  */
  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* Static so the extra hint below is printed at most once per
	 compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      /* Let the target lower the access if it provides a hook.  */
      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
9597
9598/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9599
9600   DST/SRC are the destination and source respectively.  You can pass
9601   ungimplified trees in DST or SRC, in which case they will be
9602   converted to a gimple operand if necessary.
9603
9604   This function returns the newly created GIMPLE_ASSIGN tuple.  */
9605
9606gimple
9607gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9608{
9609  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9610  gimplify_and_add (t, seq_p);
9611  ggc_free (t);
9612  return gimple_seq_last_stmt (*seq_p);
9613}
9614
9615inline hashval_t
9616gimplify_hasher::hash (const value_type *p)
9617{
9618  tree t = p->val;
9619  return iterative_hash_expr (t, 0);
9620}
9621
9622inline bool
9623gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9624{
9625  tree t1 = p1->val;
9626  tree t2 = p2->val;
9627  enum tree_code code = TREE_CODE (t1);
9628
9629  if (TREE_CODE (t2) != code
9630      || TREE_TYPE (t1) != TREE_TYPE (t2))
9631    return false;
9632
9633  if (!operand_equal_p (t1, t2, 0))
9634    return false;
9635
9636#ifdef ENABLE_CHECKING
9637  /* Only allow them to compare equal if they also hash equal; otherwise
9638     results are nondeterminate, and we fail bootstrap comparison.  */
9639  gcc_assert (hash (p1) == hash (p2));
9640#endif
9641
9642  return true;
9643}
9644