1/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.
2
3   Copyright (C) 2002-2022 Free Software Foundation, Inc.
4   Contributed by Jason Merrill <jason@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "target.h"
26#include "basic-block.h"
27#include "cp-tree.h"
28#include "gimple.h"
29#include "predict.h"
30#include "stor-layout.h"
31#include "tree-iterator.h"
32#include "gimplify.h"
33#include "c-family/c-ubsan.h"
34#include "stringpool.h"
35#include "attribs.h"
36#include "asan.h"
37#include "gcc-rich-location.h"
38#include "memmodel.h"
39#include "tm_p.h"
40#include "output.h"
41#include "file-prefix-map.h"
42#include "cgraph.h"
43#include "omp-general.h"
44#include "opts.h"
45
46struct cp_fold_data
47{
48  hash_set<tree> pset;
49  bool genericize; // called from cp_fold_function?
50
51  cp_fold_data (bool g): genericize (g) {}
52};
53
54/* Forward declarations.  */
55
56static tree cp_genericize_r (tree *, int *, void *);
57static tree cp_fold_r (tree *, int *, void *);
58static void cp_genericize_tree (tree*, bool);
59static tree cp_fold (tree);
60
61/* Genericize a TRY_BLOCK.  */
62
63static void
64genericize_try_block (tree *stmt_p)
65{
66  tree body = TRY_STMTS (*stmt_p);
67  tree cleanup = TRY_HANDLERS (*stmt_p);
68
69  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
70}
71
72/* Genericize a HANDLER by converting to a CATCH_EXPR.  */
73
74static void
75genericize_catch_block (tree *stmt_p)
76{
77  tree type = HANDLER_TYPE (*stmt_p);
78  tree body = HANDLER_BODY (*stmt_p);
79
80  /* FIXME should the caught type go in TREE_TYPE?  */
81  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
82}
83
84/* A terser interface for building a representation of an exception
85   specification.  */
86
87static tree
88build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
89{
90  tree t;
91
92  /* FIXME should the allowed types go in TREE_TYPE?  */
93  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
94  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
95
96  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
97  append_to_statement_list (body, &TREE_OPERAND (t, 0));
98
99  return t;
100}
101
102/* Genericize an EH_SPEC_BLOCK by converting it to a
103   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
104
105static void
106genericize_eh_spec_block (tree *stmt_p)
107{
108  tree body = EH_SPEC_STMTS (*stmt_p);
109  tree allowed = EH_SPEC_RAISES (*stmt_p);
110  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
111
112  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
113  suppress_warning (*stmt_p);
114  suppress_warning (TREE_OPERAND (*stmt_p, 1));
115}
116
117/* Return the first non-compound statement in STMT.  */
118
119tree
120first_stmt (tree stmt)
121{
122  switch (TREE_CODE (stmt))
123    {
124    case STATEMENT_LIST:
125      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
126	return first_stmt (p->stmt);
127      return void_node;
128
129    case BIND_EXPR:
130      return first_stmt (BIND_EXPR_BODY (stmt));
131
132    default:
133      return stmt;
134    }
135}
136
137/* Genericize an IF_STMT by turning it into a COND_EXPR.  */
138
139static void
140genericize_if_stmt (tree *stmt_p)
141{
142  tree stmt, cond, then_, else_;
143  location_t locus = EXPR_LOCATION (*stmt_p);
144
145  stmt = *stmt_p;
146  cond = IF_COND (stmt);
147  then_ = THEN_CLAUSE (stmt);
148  else_ = ELSE_CLAUSE (stmt);
149
150  if (then_ && else_)
151    {
152      tree ft = first_stmt (then_);
153      tree fe = first_stmt (else_);
154      br_predictor pr;
155      if (TREE_CODE (ft) == PREDICT_EXPR
156	  && TREE_CODE (fe) == PREDICT_EXPR
157	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
158	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
159	{
160	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
161	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
162	  warning_at (&richloc, OPT_Wattributes,
163		      "both branches of %<if%> statement marked as %qs",
164		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
165	}
166    }
167
168  if (!then_)
169    then_ = build_empty_stmt (locus);
170  if (!else_)
171    else_ = build_empty_stmt (locus);
172
173  /* consteval if has been verified not to have the then_/else_ blocks
174     entered by gotos/case labels from elsewhere, and as then_ block
175     can contain unfolded immediate function calls, we have to discard
176     the then_ block regardless of whether else_ has side-effects or not.  */
177  if (IF_STMT_CONSTEVAL_P (stmt))
178    {
179      if (block_may_fallthru (then_))
180	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
181		       void_node, else_);
182      else
183	stmt = else_;
184    }
185  else if (IF_STMT_CONSTEXPR_P (stmt))
186    stmt = integer_nonzerop (cond) ? then_ : else_;
187  /* ??? This optimization doesn't seem to belong here, but removing it
188     causes -Wreturn-type regressions (e.g. 107310).  */
189  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
190    stmt = then_;
191  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
192    stmt = else_;
193  else
194    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
195  protected_set_expr_location_if_unset (stmt, locus);
196  *stmt_p = stmt;
197}
198
199/* Hook into the middle of gimplifying an OMP_FOR node.  */
200
201static enum gimplify_status
202cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
203{
204  tree for_stmt = *expr_p;
205  gimple_seq seq = NULL;
206
207  /* Protect ourselves from recursion.  */
208  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
209    return GS_UNHANDLED;
210  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
211
212  gimplify_and_add (for_stmt, &seq);
213  gimple_seq_add_seq (pre_p, seq);
214
215  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
216
217  return GS_ALL_DONE;
218}
219
220/*  Gimplify an EXPR_STMT node.  */
221
222static void
223gimplify_expr_stmt (tree *stmt_p)
224{
225  tree stmt = EXPR_STMT_EXPR (*stmt_p);
226
227  if (stmt == error_mark_node)
228    stmt = NULL;
229
230  /* Gimplification of a statement expression will nullify the
231     statement if all its side effects are moved to *PRE_P and *POST_P.
232
233     In this case we will not want to emit the gimplified statement.
234     However, we may still want to emit a warning, so we do that before
235     gimplification.  */
236  if (stmt && warn_unused_value)
237    {
238      if (!TREE_SIDE_EFFECTS (stmt))
239	{
240	  if (!IS_EMPTY_STMT (stmt)
241	      && !VOID_TYPE_P (TREE_TYPE (stmt))
242	      && !warning_suppressed_p (stmt, OPT_Wunused_value))
243	    warning (OPT_Wunused_value, "statement with no effect");
244	}
245      else
246	warn_if_unused_value (stmt, input_location);
247    }
248
249  if (stmt == NULL_TREE)
250    stmt = alloc_stmt_list ();
251
252  *stmt_p = stmt;
253}
254
255/* Gimplify initialization from an AGGR_INIT_EXPR.  */
256
257static void
258cp_gimplify_init_expr (tree *expr_p)
259{
260  tree from = TREE_OPERAND (*expr_p, 1);
261  tree to = TREE_OPERAND (*expr_p, 0);
262  tree t;
263
264  if (TREE_CODE (from) == TARGET_EXPR)
265    if (tree init = TARGET_EXPR_INITIAL (from))
266      {
267	if (target_expr_needs_replace (from))
268	  {
269	    /* If this was changed by cp_genericize_target_expr, we need to
270	       walk into it to replace uses of the slot.  */
271	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
272	    *expr_p = init;
273	    return;
274	  }
275	else
276	  from = init;
277      }
278
279  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
280     inside the TARGET_EXPR.  */
281  for (t = from; t; )
282    {
283      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
284
285      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
286	 replace the slot operand with our target.
287
288	 Should we add a target parm to gimplify_expr instead?  No, as in this
289	 case we want to replace the INIT_EXPR.  */
290      if (TREE_CODE (sub) == AGGR_INIT_EXPR
291	  || TREE_CODE (sub) == VEC_INIT_EXPR)
292	{
293	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
294	    AGGR_INIT_EXPR_SLOT (sub) = to;
295	  else
296	    VEC_INIT_EXPR_SLOT (sub) = to;
297	  *expr_p = from;
298
299	  /* The initialization is now a side-effect, so the container can
300	     become void.  */
301	  if (from != sub)
302	    TREE_TYPE (from) = void_type_node;
303	}
304
305      /* Handle aggregate NSDMI.  */
306      replace_placeholders (sub, to);
307
308      if (t == sub)
309	break;
310      else
311	t = TREE_OPERAND (t, 1);
312    }
313
314}
315
316/* Gimplify a MUST_NOT_THROW_EXPR.  */
317
318static enum gimplify_status
319gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
320{
321  tree stmt = *expr_p;
322  tree temp = voidify_wrapper_expr (stmt, NULL);
323  tree body = TREE_OPERAND (stmt, 0);
324  gimple_seq try_ = NULL;
325  gimple_seq catch_ = NULL;
326  gimple *mnt;
327
328  gimplify_and_add (body, &try_);
329  mnt = gimple_build_eh_must_not_throw (terminate_fn);
330  gimple_seq_add_stmt_without_update (&catch_, mnt);
331  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
332
333  gimple_seq_add_stmt_without_update (pre_p, mnt);
334  if (temp)
335    {
336      *expr_p = temp;
337      return GS_OK;
338    }
339
340  *expr_p = NULL;
341  return GS_ALL_DONE;
342}
343
344/* Return TRUE if an operand (OP) of a given TYPE being copied is
345   really just an empty class copy.
346
347   Check that the operand has a simple form so that TARGET_EXPRs and
348   non-empty CONSTRUCTORs get reduced properly, and we leave the
349   return slot optimization alone because it isn't a copy.  */
350
351bool
352simple_empty_class_p (tree type, tree op, tree_code code)
353{
354  if (TREE_CODE (op) == COMPOUND_EXPR)
355    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
356  if (SIMPLE_TARGET_EXPR_P (op)
357      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
358    /* The TARGET_EXPR is itself a simple copy, look through it.  */
359    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
360
361  if (TREE_CODE (op) == PARM_DECL
362      && TREE_ADDRESSABLE (TREE_TYPE (op)))
363    {
364      tree fn = DECL_CONTEXT (op);
365      if (DECL_THUNK_P (fn)
366	  || lambda_static_thunk_p (fn))
367	/* In a thunk, we pass through invisible reference parms, so this isn't
368	   actually a copy.  */
369	return false;
370    }
371
372  return
373    (TREE_CODE (op) == EMPTY_CLASS_EXPR
374     || code == MODIFY_EXPR
375     || is_gimple_lvalue (op)
376     || INDIRECT_REF_P (op)
377     || (TREE_CODE (op) == CONSTRUCTOR
378	 && CONSTRUCTOR_NELTS (op) == 0)
379     || (TREE_CODE (op) == CALL_EXPR
380	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
381    && !TREE_CLOBBER_P (op)
382    && is_really_empty_class (type, /*ignore_vptr*/true);
383}
384
385/* Returns true if evaluating E as an lvalue has side-effects;
386   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
387   have side-effects until there is a read or write through it.  */
388
389static bool
390lvalue_has_side_effects (tree e)
391{
392  if (!TREE_SIDE_EFFECTS (e))
393    return false;
394  while (handled_component_p (e))
395    {
396      if (TREE_CODE (e) == ARRAY_REF
397	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
398	return true;
399      e = TREE_OPERAND (e, 0);
400    }
401  if (DECL_P (e))
402    /* Just naming a variable has no side-effects.  */
403    return false;
404  else if (INDIRECT_REF_P (e))
405    /* Similarly, indirection has no side-effects.  */
406    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
407  else
408    /* For anything else, trust TREE_SIDE_EFFECTS.  */
409    return TREE_SIDE_EFFECTS (e);
410}
411
412/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
413   by expressions with side-effects in other operands.  */
414
415static enum gimplify_status
416gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
417		    bool (*gimple_test_f) (tree))
418{
419  enum gimplify_status t
420    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
421  if (t == GS_ERROR)
422    return GS_ERROR;
423  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
424    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
425  return t;
426}
427
428/* Like gimplify_arg, but if ORDERED is set (which should be set if
429   any of the arguments this argument is sequenced before has
430   TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
431   are gimplified into SSA_NAME or a fresh temporary and for
432   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */
433
434static enum gimplify_status
435cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
436		 bool ordered)
437{
438  enum gimplify_status t;
439  if (ordered
440      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
441      && TREE_CODE (*arg_p) == TARGET_EXPR)
442    {
443      /* gimplify_arg would strip away the TARGET_EXPR, but
444	 that can mean we don't copy the argument and some following
445	 argument with side-effect could modify it.  */
446      protected_set_expr_location (*arg_p, call_location);
447      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
448    }
449  else
450    {
451      t = gimplify_arg (arg_p, pre_p, call_location);
452      if (t == GS_ERROR)
453	return GS_ERROR;
454      else if (ordered
455	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
456	       && is_gimple_variable (*arg_p)
457	       && TREE_CODE (*arg_p) != SSA_NAME
458	       /* No need to force references into register, references
459		  can't be modified.  */
460	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
461	       /* And this can't be modified either.  */
462	       && *arg_p != current_class_ptr)
463	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
464      return t;
465    }
466
467}
468
469/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */
470
471int
472cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
473{
474  int saved_stmts_are_full_exprs_p = 0;
475  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
476  enum tree_code code = TREE_CODE (*expr_p);
477  enum gimplify_status ret;
478
479  if (STATEMENT_CODE_P (code))
480    {
481      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
482      current_stmt_tree ()->stmts_are_full_exprs_p
483	= STMT_IS_FULL_EXPR_P (*expr_p);
484    }
485
486  switch (code)
487    {
488    case AGGR_INIT_EXPR:
489      simplify_aggr_init_expr (expr_p);
490      ret = GS_OK;
491      break;
492
493    case VEC_INIT_EXPR:
494      {
495	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
496					tf_warning_or_error);
497
498	cp_fold_data data (/*genericize*/true);
499	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
500	cp_genericize_tree (expr_p, false);
501	copy_if_shared (expr_p);
502	ret = GS_OK;
503      }
504      break;
505
506    case THROW_EXPR:
507      /* FIXME communicate throw type to back end, probably by moving
508	 THROW_EXPR into ../tree.def.  */
509      *expr_p = TREE_OPERAND (*expr_p, 0);
510      ret = GS_OK;
511      break;
512
513    case MUST_NOT_THROW_EXPR:
514      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
515      break;
516
517      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
518	 LHS of an assignment might also be involved in the RHS, as in bug
519	 25979.  */
520    case INIT_EXPR:
521      cp_gimplify_init_expr (expr_p);
522      if (TREE_CODE (*expr_p) != INIT_EXPR)
523	return GS_OK;
524      /* Fall through.  */
525    case MODIFY_EXPR:
526    modify_expr_case:
527      {
528	/* If the back end isn't clever enough to know that the lhs and rhs
529	   types are the same, add an explicit conversion.  */
530	tree op0 = TREE_OPERAND (*expr_p, 0);
531	tree op1 = TREE_OPERAND (*expr_p, 1);
532
533	if (!error_operand_p (op0)
534	    && !error_operand_p (op1)
535	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
536		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
537	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
538	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
539					      TREE_TYPE (op0), op1);
540
541	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
542	  {
543	    while (TREE_CODE (op1) == TARGET_EXPR)
544	      /* We're disconnecting the initializer from its target,
545		 don't create a temporary.  */
546	      op1 = TARGET_EXPR_INITIAL (op1);
547
548	    /* Remove any copies of empty classes.  Also drop volatile
549	       variables on the RHS to avoid infinite recursion from
550	       gimplify_expr trying to load the value.  */
551	    if (TREE_SIDE_EFFECTS (op1))
552	      {
553		if (TREE_THIS_VOLATILE (op1)
554		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
555		  op1 = build_fold_addr_expr (op1);
556
557		gimplify_and_add (op1, pre_p);
558	      }
559	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
560			   is_gimple_lvalue, fb_lvalue);
561	    *expr_p = TREE_OPERAND (*expr_p, 0);
562	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
563	      /* Avoid 'return *<retval>;'  */
564	      *expr_p = TREE_OPERAND (*expr_p, 0);
565	  }
566	/* P0145 says that the RHS is sequenced before the LHS.
567	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
568	   isn't quite strong enough in two cases:
569
570	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
571	   mean it's evaluated after the LHS.
572
573	   2) the value calculation of the RHS is also sequenced before the
574	   LHS, so for scalar assignment we need to preevaluate if the
575	   RHS could be affected by LHS side-effects even if it has no
576	   side-effects of its own.  We don't need this for classes because
577	   class assignment takes its RHS by reference.  */
578       else if (flag_strong_eval_order > 1
579                && TREE_CODE (*expr_p) == MODIFY_EXPR
580                && lvalue_has_side_effects (op0)
581		&& (TREE_CODE (op1) == CALL_EXPR
582		    || (SCALAR_TYPE_P (TREE_TYPE (op1))
583			&& !TREE_CONSTANT (op1))))
584	 TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
585      }
586      ret = GS_OK;
587      break;
588
589    case EMPTY_CLASS_EXPR:
590      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
591      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
592      ret = GS_OK;
593      break;
594
595    case BASELINK:
596      *expr_p = BASELINK_FUNCTIONS (*expr_p);
597      ret = GS_OK;
598      break;
599
600    case TRY_BLOCK:
601      genericize_try_block (expr_p);
602      ret = GS_OK;
603      break;
604
605    case HANDLER:
606      genericize_catch_block (expr_p);
607      ret = GS_OK;
608      break;
609
610    case EH_SPEC_BLOCK:
611      genericize_eh_spec_block (expr_p);
612      ret = GS_OK;
613      break;
614
615    case USING_STMT:
616      gcc_unreachable ();
617
618    case FOR_STMT:
619    case WHILE_STMT:
620    case DO_STMT:
621    case SWITCH_STMT:
622    case CONTINUE_STMT:
623    case BREAK_STMT:
624      gcc_unreachable ();
625
626    case OMP_FOR:
627    case OMP_SIMD:
628    case OMP_DISTRIBUTE:
629    case OMP_LOOP:
630    case OMP_TASKLOOP:
631      ret = cp_gimplify_omp_for (expr_p, pre_p);
632      break;
633
634    case EXPR_STMT:
635      gimplify_expr_stmt (expr_p);
636      ret = GS_OK;
637      break;
638
639    case UNARY_PLUS_EXPR:
640      {
641	tree arg = TREE_OPERAND (*expr_p, 0);
642	tree type = TREE_TYPE (*expr_p);
643	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
644					    : arg;
645	ret = GS_OK;
646      }
647      break;
648
649    case CALL_EXPR:
650      ret = GS_OK;
651      if (flag_strong_eval_order == 2
652	  && CALL_EXPR_FN (*expr_p)
653	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
654	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
655	{
656	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
657	  enum gimplify_status t
658	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
659				  is_gimple_call_addr);
660	  if (t == GS_ERROR)
661	    ret = GS_ERROR;
662	  /* GIMPLE considers most pointer conversion useless, but for
663	     calls we actually care about the exact function pointer type.  */
664	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
665	    CALL_EXPR_FN (*expr_p)
666	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
667	}
668      if (!CALL_EXPR_FN (*expr_p))
669	/* Internal function call.  */;
670      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
671	{
672	  /* This is a call to a (compound) assignment operator that used
673	     the operator syntax; gimplify the RHS first.  */
674	  gcc_assert (call_expr_nargs (*expr_p) == 2);
675	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
676	  enum gimplify_status t
677	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
678			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
679	  if (t == GS_ERROR)
680	    ret = GS_ERROR;
681	}
682      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
683	{
684	  /* Leave the last argument for gimplify_call_expr, to avoid problems
685	     with __builtin_va_arg_pack().  */
686	  int nargs = call_expr_nargs (*expr_p) - 1;
687	  int last_side_effects_arg = -1;
688	  for (int i = nargs; i > 0; --i)
689	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
690	      {
691		last_side_effects_arg = i;
692		break;
693	      }
694	  for (int i = 0; i < nargs; ++i)
695	    {
696	      enum gimplify_status t
697		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
698				   i < last_side_effects_arg);
699	      if (t == GS_ERROR)
700		ret = GS_ERROR;
701	    }
702	}
703      else if (flag_strong_eval_order
704	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
705	{
706	  /* If flag_strong_eval_order, evaluate the object argument first.  */
707	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
708	  if (INDIRECT_TYPE_P (fntype))
709	    fntype = TREE_TYPE (fntype);
710	  if (TREE_CODE (fntype) == METHOD_TYPE)
711	    {
712	      int nargs = call_expr_nargs (*expr_p);
713	      bool side_effects = false;
714	      for (int i = 1; i < nargs; ++i)
715		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
716		  {
717		    side_effects = true;
718		    break;
719		  }
720	      enum gimplify_status t
721		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
722				   side_effects);
723	      if (t == GS_ERROR)
724		ret = GS_ERROR;
725	    }
726	}
727      if (ret != GS_ERROR)
728	{
729	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
730	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
731	    switch (DECL_FE_FUNCTION_CODE (decl))
732	      {
733	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
734		*expr_p = boolean_false_node;
735		break;
736	      case CP_BUILT_IN_SOURCE_LOCATION:
737		*expr_p
738		  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
739		break;
740	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
741		*expr_p
742		  = fold_builtin_is_corresponding_member
743			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
744			 &CALL_EXPR_ARG (*expr_p, 0));
745		break;
746	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
747		*expr_p
748		  = fold_builtin_is_pointer_inverconvertible_with_class
749			(EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
750			 &CALL_EXPR_ARG (*expr_p, 0));
751		break;
752	      default:
753		break;
754	      }
755	}
756      break;
757
758    case TARGET_EXPR:
759      /* A TARGET_EXPR that expresses direct-initialization should have been
760	 elided by cp_gimplify_init_expr.  */
761      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
762      ret = GS_UNHANDLED;
763      break;
764
765    case PTRMEM_CST:
766      *expr_p = cplus_expand_constant (*expr_p);
767      if (TREE_CODE (*expr_p) == PTRMEM_CST)
768	ret = GS_ERROR;
769      else
770	ret = GS_OK;
771      break;
772
773    case RETURN_EXPR:
774      if (TREE_OPERAND (*expr_p, 0)
775	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
776	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
777	{
778	  expr_p = &TREE_OPERAND (*expr_p, 0);
779	  /* Avoid going through the INIT_EXPR case, which can
780	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
781	  goto modify_expr_case;
782	}
783      /* Fall through.  */
784
785    default:
786      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
787      break;
788    }
789
790  /* Restore saved state.  */
791  if (STATEMENT_CODE_P (code))
792    current_stmt_tree ()->stmts_are_full_exprs_p
793      = saved_stmts_are_full_exprs_p;
794
795  return ret;
796}
797
798static inline bool
799is_invisiref_parm (const_tree t)
800{
801  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
802	  && DECL_BY_REFERENCE (t));
803}
804
805/* A stable comparison routine for use with splay trees and DECLs.  */
806
807static int
808splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
809{
810  tree a = (tree) xa;
811  tree b = (tree) xb;
812
813  return DECL_UID (a) - DECL_UID (b);
814}
815
816/* OpenMP context during genericization.  */
817
818struct cp_genericize_omp_taskreg
819{
820  bool is_parallel;
821  bool default_shared;
822  struct cp_genericize_omp_taskreg *outer;
823  splay_tree variables;
824};
825
826/* Return true if genericization should try to determine if
827   DECL is firstprivate or shared within task regions.  */
828
829static bool
830omp_var_to_track (tree decl)
831{
832  tree type = TREE_TYPE (decl);
833  if (is_invisiref_parm (decl))
834    type = TREE_TYPE (type);
835  else if (TYPE_REF_P (type))
836    type = TREE_TYPE (type);
837  while (TREE_CODE (type) == ARRAY_TYPE)
838    type = TREE_TYPE (type);
839  if (type == error_mark_node || !CLASS_TYPE_P (type))
840    return false;
841  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
842    return false;
843  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
844    return false;
845  return true;
846}
847
848/* Note DECL use in OpenMP region OMP_CTX during genericization.  */
849
850static void
851omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
852{
853  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
854					 (splay_tree_key) decl);
855  if (n == NULL)
856    {
857      int flags = OMP_CLAUSE_DEFAULT_SHARED;
858      if (omp_ctx->outer)
859	omp_cxx_notice_variable (omp_ctx->outer, decl);
860      if (!omp_ctx->default_shared)
861	{
862	  struct cp_genericize_omp_taskreg *octx;
863
864	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
865	    {
866	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
867	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
868		{
869		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
870		  break;
871		}
872	      if (octx->is_parallel)
873		break;
874	    }
875	  if (octx == NULL
876	      && (TREE_CODE (decl) == PARM_DECL
877		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
878		      && DECL_CONTEXT (decl) == current_function_decl)))
879	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
880	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
881	    {
882	      /* DECL is implicitly determined firstprivate in
883		 the current task construct.  Ensure copy ctor and
884		 dtor are instantiated, because during gimplification
885		 it will be already too late.  */
886	      tree type = TREE_TYPE (decl);
887	      if (is_invisiref_parm (decl))
888		type = TREE_TYPE (type);
889	      else if (TYPE_REF_P (type))
890		type = TREE_TYPE (type);
891	      while (TREE_CODE (type) == ARRAY_TYPE)
892		type = TREE_TYPE (type);
893	      get_copy_ctor (type, tf_none);
894	      get_dtor (type, tf_none);
895	    }
896	}
897      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
898    }
899}
900
901/* If we might need to clean up a partially constructed object, break down the
902   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
903   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
904   the result.  */
905
906static void
907cp_genericize_init (tree *replace, tree from, tree to)
908{
909  if (TREE_CODE (from) == VEC_INIT_EXPR)
910    {
911      tree init = expand_vec_init_expr (to, from, tf_warning_or_error);
912
913      /* Make cp_gimplify_init_expr call replace_decl.  */
914      *replace = fold_convert (void_type_node, init);
915    }
916  else if (flag_exceptions
917	   && TREE_CODE (from) == CONSTRUCTOR
918	   && TREE_SIDE_EFFECTS (from)
919	   && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
920    {
921      to = cp_stabilize_reference (to);
922      replace_placeholders (from, to);
923      *replace = split_nonconstant_init (to, from);
924    }
925}
926
927/* For an INIT_EXPR, replace the INIT_EXPR itself.  */
928
929static void
930cp_genericize_init_expr (tree *stmt_p)
931{
932  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
933  tree to = TREE_OPERAND (*stmt_p, 0);
934  tree from = TREE_OPERAND (*stmt_p, 1);
935  if (SIMPLE_TARGET_EXPR_P (from)
936      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
937      && TREE_CODE (to) != RESULT_DECL)
938    from = TARGET_EXPR_INITIAL (from);
939  cp_genericize_init (stmt_p, from, to);
940}
941
942/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
943   replace_decl later when we know what we're initializing.  */
944
945static void
946cp_genericize_target_expr (tree *stmt_p)
947{
948  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
949  tree slot = TARGET_EXPR_SLOT (*stmt_p);
950  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
951		      TARGET_EXPR_INITIAL (*stmt_p), slot);
952  gcc_assert (!DECL_INITIAL (slot));
953}
954
955/* Genericization context.  */
956
957struct cp_genericize_data
958{
959  hash_set<tree> *p_set;
960  auto_vec<tree> bind_expr_stack;
961  struct cp_genericize_omp_taskreg *omp_ctx;
962  tree try_block;
963  bool no_sanitize_p;
964  bool handle_invisiref_parm_p;
965};
966
967/* Perform any pre-gimplification folding of C++ front end trees to
968   GENERIC.
969   Note:  The folding of non-omp cases is something to move into
970     the middle-end.  As for now we have most foldings only on GENERIC
971     in fold-const, we need to perform this before transformation to
972     GIMPLE-form.  */
973
974static tree
975cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
976{
977  cp_fold_data *data = (cp_fold_data*)data_;
978  tree stmt = *stmt_p;
979  enum tree_code code = TREE_CODE (stmt);
980
981  switch (code)
982    {
983    case PTRMEM_CST:
984      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
985	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
986	{
987	  if (!data->pset.add (stmt))
988	    error_at (PTRMEM_CST_LOCATION (stmt),
989		      "taking address of an immediate function %qD",
990		      PTRMEM_CST_MEMBER (stmt));
991	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
992	  break;
993	}
994      break;
995
996    case ADDR_EXPR:
997      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
998	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
999	{
1000	  error_at (EXPR_LOCATION (stmt),
1001		    "taking address of an immediate function %qD",
1002		    TREE_OPERAND (stmt, 0));
1003	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
1004	  break;
1005	}
1006      break;
1007
1008    case CALL_EXPR:
1009      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
1010	if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
1011	    && source_location_current_p (fndecl))
1012	  *stmt_p = stmt = cxx_constant_value (stmt);
1013      break;
1014
1015    case VAR_DECL:
1016      /* In initializers replace anon union artificial VAR_DECLs
1017	 with their DECL_VALUE_EXPRs, as nothing will do it later.
1018	 Ditto for structured bindings.  */
1019      if (!data->genericize
1020	  && DECL_HAS_VALUE_EXPR_P (stmt)
1021	  && (DECL_ANON_UNION_VAR_P (stmt)
1022	      || (DECL_DECOMPOSITION_P (stmt) && DECL_DECOMP_BASE (stmt))))
1023	{
1024	  *stmt_p = stmt = unshare_expr (DECL_VALUE_EXPR (stmt));
1025	  break;
1026	}
1027      break;
1028
1029    default:
1030      break;
1031    }
1032
1033  *stmt_p = stmt = cp_fold (*stmt_p);
1034
1035  if (data->pset.add (stmt))
1036    {
1037      /* Don't walk subtrees of stmts we've already walked once, otherwise
1038	 we can have exponential complexity with e.g. lots of nested
1039	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
1040	 always the same tree, which the first time cp_fold_r has been
1041	 called on it had the subtrees walked.  */
1042      *walk_subtrees = 0;
1043      return NULL;
1044    }
1045
1046  code = TREE_CODE (stmt);
1047  switch (code)
1048    {
1049      tree x;
1050      int i, n;
1051    case OMP_FOR:
1052    case OMP_SIMD:
1053    case OMP_DISTRIBUTE:
1054    case OMP_LOOP:
1055    case OMP_TASKLOOP:
1056    case OACC_LOOP:
1057      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1058      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1059      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1060      x = OMP_FOR_COND (stmt);
1061      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1062	{
1063	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1064	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1065	}
1066      else if (x && TREE_CODE (x) == TREE_VEC)
1067	{
1068	  n = TREE_VEC_LENGTH (x);
1069	  for (i = 0; i < n; i++)
1070	    {
1071	      tree o = TREE_VEC_ELT (x, i);
1072	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1073		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1074	    }
1075	}
1076      x = OMP_FOR_INCR (stmt);
1077      if (x && TREE_CODE (x) == TREE_VEC)
1078	{
1079	  n = TREE_VEC_LENGTH (x);
1080	  for (i = 0; i < n; i++)
1081	    {
1082	      tree o = TREE_VEC_ELT (x, i);
1083	      if (o && TREE_CODE (o) == MODIFY_EXPR)
1084		o = TREE_OPERAND (o, 1);
1085	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1086			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
1087		{
1088		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1089		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1090		}
1091	    }
1092	}
1093      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1094      *walk_subtrees = 0;
1095      return NULL;
1096
1097    case IF_STMT:
1098      if (IF_STMT_CONSTEVAL_P (stmt))
1099	{
1100	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
1101	     boolean_false_node.  */
1102	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
1103	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
1104	  *walk_subtrees = 0;
1105	  return NULL;
1106	}
1107      break;
1108
1109      /* These are only for genericize time; they're here rather than in
1110	 cp_genericize to avoid problems with the invisible reference
1111	 transition.  */
1112    case INIT_EXPR:
1113      if (data->genericize)
1114	cp_genericize_init_expr (stmt_p);
1115      break;
1116
1117    case TARGET_EXPR:
1118      if (data->genericize)
1119	cp_genericize_target_expr (stmt_p);
1120      break;
1121
1122    default:
1123      break;
1124    }
1125
1126  return NULL;
1127}
1128
1129/* Fold ALL the trees!  FIXME we should be able to remove this, but
1130   apparently that still causes optimization regressions.  */
1131
1132void
1133cp_fold_function (tree fndecl)
1134{
1135  cp_fold_data data (/*genericize*/true);
1136  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1137}
1138
1139/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */
1140
1141static tree genericize_spaceship (tree expr)
1142{
1143  iloc_sentinel s (cp_expr_location (expr));
1144  tree type = TREE_TYPE (expr);
1145  tree op0 = TREE_OPERAND (expr, 0);
1146  tree op1 = TREE_OPERAND (expr, 1);
1147  return genericize_spaceship (input_location, type, op0, op1);
1148}
1149
1150/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1151   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1152   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
1153   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */
1154
1155tree
1156predeclare_vla (tree expr)
1157{
1158  tree type = TREE_TYPE (expr);
1159  if (type == error_mark_node)
1160    return expr;
1161  if (is_typedef_decl (expr))
1162    type = DECL_ORIGINAL_TYPE (expr);
1163
1164  /* We need to strip pointers for gimplify_type_sizes.  */
1165  tree vla = type;
1166  while (POINTER_TYPE_P (vla))
1167    {
1168      if (TYPE_NAME (vla))
1169	return expr;
1170      vla = TREE_TYPE (vla);
1171    }
1172  if (vla == type || TYPE_NAME (vla)
1173      || !variably_modified_type_p (vla, NULL_TREE))
1174    return expr;
1175
1176  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1177  DECL_ARTIFICIAL (decl) = 1;
1178  TYPE_NAME (vla) = decl;
1179  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1180  if (DECL_P (expr))
1181    {
1182      add_stmt (dexp);
1183      return NULL_TREE;
1184    }
1185  else
1186    {
1187      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1188      return expr;
1189    }
1190}
1191
1192/* Perform any pre-gimplification lowering of C++ front end trees to
1193   GENERIC.  */
1194
1195static tree
1196cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1197{
1198  tree stmt = *stmt_p;
1199  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1200  hash_set<tree> *p_set = wtd->p_set;
1201
1202  /* If in an OpenMP context, note var uses.  */
1203  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1204      && (VAR_P (stmt)
1205	  || TREE_CODE (stmt) == PARM_DECL
1206	  || TREE_CODE (stmt) == RESULT_DECL)
1207      && omp_var_to_track (stmt))
1208    omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1209
1210  /* Don't dereference parms in a thunk, pass the references through. */
1211  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1212      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1213    {
1214      *walk_subtrees = 0;
1215      return NULL;
1216    }
1217
1218  /* Dereference invisible reference parms.  */
1219  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1220    {
1221      *stmt_p = convert_from_reference (stmt);
1222      p_set->add (*stmt_p);
1223      *walk_subtrees = 0;
1224      return NULL;
1225    }
1226
1227  /* Map block scope extern declarations to visible declarations with the
1228     same name and type in outer scopes if any.  */
1229  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1230    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1231      {
1232	if (alias != error_mark_node)
1233	  {
1234	    *stmt_p = alias;
1235	    TREE_USED (alias) |= TREE_USED (stmt);
1236	  }
1237	*walk_subtrees = 0;
1238	return NULL;
1239      }
1240
1241  if (TREE_CODE (stmt) == INTEGER_CST
1242      && TYPE_REF_P (TREE_TYPE (stmt))
1243      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1244      && !wtd->no_sanitize_p)
1245    {
1246      ubsan_maybe_instrument_reference (stmt_p);
1247      if (*stmt_p != stmt)
1248	{
1249	  *walk_subtrees = 0;
1250	  return NULL_TREE;
1251	}
1252    }
1253
1254  /* Other than invisiref parms, don't walk the same tree twice.  */
1255  if (p_set->contains (stmt))
1256    {
1257      *walk_subtrees = 0;
1258      return NULL_TREE;
1259    }
1260
1261  switch (TREE_CODE (stmt))
1262    {
1263    case ADDR_EXPR:
1264      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1265	{
1266	  /* If in an OpenMP context, note var uses.  */
1267	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1268	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1269	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1270	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1271	  *walk_subtrees = 0;
1272	}
1273      break;
1274
1275    case RETURN_EXPR:
1276      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1277	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
1278	*walk_subtrees = 0;
1279      break;
1280
1281    case OMP_CLAUSE:
1282      switch (OMP_CLAUSE_CODE (stmt))
1283	{
1284	case OMP_CLAUSE_LASTPRIVATE:
1285	  /* Don't dereference an invisiref in OpenMP clauses.  */
1286	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1287	    {
1288	      *walk_subtrees = 0;
1289	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1290		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1291			      cp_genericize_r, data, NULL);
1292	    }
1293	  break;
1294	case OMP_CLAUSE_PRIVATE:
1295	  /* Don't dereference an invisiref in OpenMP clauses.  */
1296	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1297	    *walk_subtrees = 0;
1298	  else if (wtd->omp_ctx != NULL)
1299	    {
1300	      /* Private clause doesn't cause any references to the
1301		 var in outer contexts, avoid calling
1302		 omp_cxx_notice_variable for it.  */
1303	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1304	      wtd->omp_ctx = NULL;
1305	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1306			    data, NULL);
1307	      wtd->omp_ctx = old;
1308	      *walk_subtrees = 0;
1309	    }
1310	  break;
1311	case OMP_CLAUSE_SHARED:
1312	case OMP_CLAUSE_FIRSTPRIVATE:
1313	case OMP_CLAUSE_COPYIN:
1314	case OMP_CLAUSE_COPYPRIVATE:
1315	case OMP_CLAUSE_INCLUSIVE:
1316	case OMP_CLAUSE_EXCLUSIVE:
1317	  /* Don't dereference an invisiref in OpenMP clauses.  */
1318	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1319	    *walk_subtrees = 0;
1320	  break;
1321	case OMP_CLAUSE_REDUCTION:
1322	case OMP_CLAUSE_IN_REDUCTION:
1323	case OMP_CLAUSE_TASK_REDUCTION:
1324	  /* Don't dereference an invisiref in reduction clause's
1325	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1326	     still needs to be genericized.  */
1327	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1328	    {
1329	      *walk_subtrees = 0;
1330	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1331		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1332			      cp_genericize_r, data, NULL);
1333	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1334		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1335			      cp_genericize_r, data, NULL);
1336	    }
1337	  break;
1338	default:
1339	  break;
1340	}
1341      break;
1342
1343    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1344       to lower this construct before scanning it, so we need to lower these
1345       before doing anything else.  */
1346    case CLEANUP_STMT:
1347      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1348			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1349						   : TRY_FINALLY_EXPR,
1350			    void_type_node,
1351			    CLEANUP_BODY (stmt),
1352			    CLEANUP_EXPR (stmt));
1353      break;
1354
1355    case IF_STMT:
1356      genericize_if_stmt (stmt_p);
1357      /* *stmt_p has changed, tail recurse to handle it again.  */
1358      return cp_genericize_r (stmt_p, walk_subtrees, data);
1359
1360    /* COND_EXPR might have incompatible types in branches if one or both
1361       arms are bitfields.  Fix it up now.  */
1362    case COND_EXPR:
1363      {
1364	tree type_left
1365	  = (TREE_OPERAND (stmt, 1)
1366	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1367	     : NULL_TREE);
1368	tree type_right
1369	  = (TREE_OPERAND (stmt, 2)
1370	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1371	     : NULL_TREE);
1372	if (type_left
1373	    && !useless_type_conversion_p (TREE_TYPE (stmt),
1374					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
1375	  {
1376	    TREE_OPERAND (stmt, 1)
1377	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1378	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1379						   type_left));
1380	  }
1381	if (type_right
1382	    && !useless_type_conversion_p (TREE_TYPE (stmt),
1383					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
1384	  {
1385	    TREE_OPERAND (stmt, 2)
1386	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1387	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1388						   type_right));
1389	  }
1390      }
1391      break;
1392
1393    case BIND_EXPR:
1394      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1395	{
1396	  tree decl;
1397	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1398	    if (VAR_P (decl)
1399		&& !DECL_EXTERNAL (decl)
1400		&& omp_var_to_track (decl))
1401	      {
1402		splay_tree_node n
1403		  = splay_tree_lookup (wtd->omp_ctx->variables,
1404				       (splay_tree_key) decl);
1405		if (n == NULL)
1406		  splay_tree_insert (wtd->omp_ctx->variables,
1407				     (splay_tree_key) decl,
1408				     TREE_STATIC (decl)
1409				     ? OMP_CLAUSE_DEFAULT_SHARED
1410				     : OMP_CLAUSE_DEFAULT_PRIVATE);
1411	      }
1412	}
1413      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1414	{
1415	  /* The point here is to not sanitize static initializers.  */
1416	  bool no_sanitize_p = wtd->no_sanitize_p;
1417	  wtd->no_sanitize_p = true;
1418	  for (tree decl = BIND_EXPR_VARS (stmt);
1419	       decl;
1420	       decl = DECL_CHAIN (decl))
1421	    if (VAR_P (decl)
1422		&& TREE_STATIC (decl)
1423		&& DECL_INITIAL (decl))
1424	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1425	  wtd->no_sanitize_p = no_sanitize_p;
1426	}
1427      wtd->bind_expr_stack.safe_push (stmt);
1428      cp_walk_tree (&BIND_EXPR_BODY (stmt),
1429		    cp_genericize_r, data, NULL);
1430      wtd->bind_expr_stack.pop ();
1431      break;
1432
1433    case USING_STMT:
1434      {
1435	tree block = NULL_TREE;
1436
1437	/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1438	   BLOCK, and append an IMPORTED_DECL to its
1439	   BLOCK_VARS chained list.  */
1440	if (wtd->bind_expr_stack.exists ())
1441	  {
1442	    int i;
1443	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1444	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1445		break;
1446	  }
1447	if (block)
1448	  {
1449	    tree decl = TREE_OPERAND (stmt, 0);
1450	    gcc_assert (decl);
1451
1452	    if (undeduced_auto_decl (decl))
1453	      /* Omit from the GENERIC, the back-end can't handle it.  */;
1454	    else
1455	      {
1456		tree using_directive = make_node (IMPORTED_DECL);
1457		TREE_TYPE (using_directive) = void_type_node;
1458		DECL_CONTEXT (using_directive) = current_function_decl;
1459		DECL_SOURCE_LOCATION (using_directive)
1460		  = cp_expr_loc_or_input_loc (stmt);
1461
1462		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1463		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1464		BLOCK_VARS (block) = using_directive;
1465	      }
1466	  }
1467	/* The USING_STMT won't appear in GENERIC.  */
1468	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1469	*walk_subtrees = 0;
1470      }
1471      break;
1472
1473    case DECL_EXPR:
1474      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1475	{
1476	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
1477	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1478	  *walk_subtrees = 0;
1479	}
1480      else
1481	{
1482	  tree d = DECL_EXPR_DECL (stmt);
1483	  if (VAR_P (d))
1484	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1485	}
1486      break;
1487
1488    case OMP_PARALLEL:
1489    case OMP_TASK:
1490    case OMP_TASKLOOP:
1491      {
1492	struct cp_genericize_omp_taskreg omp_ctx;
1493	tree c, decl;
1494	splay_tree_node n;
1495
1496	*walk_subtrees = 0;
1497	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1498	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1499	omp_ctx.default_shared = omp_ctx.is_parallel;
1500	omp_ctx.outer = wtd->omp_ctx;
1501	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1502	wtd->omp_ctx = &omp_ctx;
1503	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1504	  switch (OMP_CLAUSE_CODE (c))
1505	    {
1506	    case OMP_CLAUSE_SHARED:
1507	    case OMP_CLAUSE_PRIVATE:
1508	    case OMP_CLAUSE_FIRSTPRIVATE:
1509	    case OMP_CLAUSE_LASTPRIVATE:
1510	      decl = OMP_CLAUSE_DECL (c);
1511	      if (decl == error_mark_node || !omp_var_to_track (decl))
1512		break;
1513	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1514	      if (n != NULL)
1515		break;
1516	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1517				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1518				 ? OMP_CLAUSE_DEFAULT_SHARED
1519				 : OMP_CLAUSE_DEFAULT_PRIVATE);
1520	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1521		omp_cxx_notice_variable (omp_ctx.outer, decl);
1522	      break;
1523	    case OMP_CLAUSE_DEFAULT:
1524	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1525		omp_ctx.default_shared = true;
1526	    default:
1527	      break;
1528	    }
1529	if (TREE_CODE (stmt) == OMP_TASKLOOP)
1530	  c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1531				     cp_genericize_r, cp_walk_subtrees);
1532	else
1533	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1534	wtd->omp_ctx = omp_ctx.outer;
1535	splay_tree_delete (omp_ctx.variables);
1536      }
1537      break;
1538
1539    case OMP_TARGET:
1540      cfun->has_omp_target = true;
1541      break;
1542
1543    case TRY_BLOCK:
1544      {
1545        *walk_subtrees = 0;
1546        tree try_block = wtd->try_block;
1547        wtd->try_block = stmt;
1548        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1549        wtd->try_block = try_block;
1550        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1551      }
1552      break;
1553
1554    case MUST_NOT_THROW_EXPR:
1555      /* MUST_NOT_THROW_COND might be something else with TM.  */
1556      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1557	{
1558	  *walk_subtrees = 0;
1559	  tree try_block = wtd->try_block;
1560	  wtd->try_block = stmt;
1561	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1562	  wtd->try_block = try_block;
1563	}
1564      break;
1565
1566    case THROW_EXPR:
1567      {
1568	location_t loc = location_of (stmt);
1569	if (warning_suppressed_p (stmt /* What warning? */))
1570	  /* Never mind.  */;
1571	else if (wtd->try_block)
1572	  {
1573	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1574	      {
1575		auto_diagnostic_group d;
1576		if (warning_at (loc, OPT_Wterminate,
1577				"%<throw%> will always call %<terminate%>")
1578		    && cxx_dialect >= cxx11
1579		    && DECL_DESTRUCTOR_P (current_function_decl))
1580		  inform (loc, "in C++11 destructors default to %<noexcept%>");
1581	      }
1582	  }
1583	else
1584	  {
1585	    if (warn_cxx11_compat && cxx_dialect < cxx11
1586		&& DECL_DESTRUCTOR_P (current_function_decl)
1587		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1588		    == NULL_TREE)
1589		&& (get_defaulted_eh_spec (current_function_decl)
1590		    == empty_except_spec))
1591	      warning_at (loc, OPT_Wc__11_compat,
1592			  "in C++11 this %<throw%> will call %<terminate%> "
1593			  "because destructors default to %<noexcept%>");
1594	  }
1595      }
1596      break;
1597
1598    case CONVERT_EXPR:
1599      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1600      break;
1601
1602    case SPACESHIP_EXPR:
1603      *stmt_p = genericize_spaceship (*stmt_p);
1604      break;
1605
1606    case PTRMEM_CST:
1607      /* By the time we get here we're handing off to the back end, so we don't
1608	 need or want to preserve PTRMEM_CST anymore.  */
1609      *stmt_p = cplus_expand_constant (stmt);
1610      *walk_subtrees = 0;
1611      break;
1612
1613    case MEM_REF:
1614      /* For MEM_REF, make sure not to sanitize the second operand even
1615	 if it has reference type.  It is just an offset with a type
1616	 holding other information.  There is no other processing we
1617	 need to do for INTEGER_CSTs, so just ignore the second argument
1618	 unconditionally.  */
1619      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1620      *walk_subtrees = 0;
1621      break;
1622
1623    case NOP_EXPR:
1624      *stmt_p = predeclare_vla (*stmt_p);
1625      if (!wtd->no_sanitize_p
1626	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1627	  && TYPE_REF_P (TREE_TYPE (stmt)))
1628	ubsan_maybe_instrument_reference (stmt_p);
1629      break;
1630
1631    case CALL_EXPR:
1632      /* Evaluate function concept checks instead of treating them as
1633	 normal functions.  */
1634      if (concept_check_p (stmt))
1635	{
1636	  *stmt_p = evaluate_concept_check (stmt);
1637	  * walk_subtrees = 0;
1638	  break;
1639	}
1640
1641      if (!wtd->no_sanitize_p
1642	  && sanitize_flags_p ((SANITIZE_NULL
1643				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1644	{
1645	  tree fn = CALL_EXPR_FN (stmt);
1646	  if (fn != NULL_TREE
1647	      && !error_operand_p (fn)
1648	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
1649	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1650	    {
1651	      bool is_ctor
1652		= TREE_CODE (fn) == ADDR_EXPR
1653		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1654		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1655	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1656		ubsan_maybe_instrument_member_call (stmt, is_ctor);
1657	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1658		cp_ubsan_maybe_instrument_member_call (stmt);
1659	    }
1660	  else if (fn == NULL_TREE
1661		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1662		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1663		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1664	    *walk_subtrees = 0;
1665	}
1666      /* Fall through.  */
1667    case AGGR_INIT_EXPR:
1668      /* For calls to a multi-versioned function, overload resolution
1669	 returns the function with the highest target priority, that is,
1670	 the version that will checked for dispatching first.  If this
1671	 version is inlinable, a direct call to this version can be made
1672	 otherwise the call should go through the dispatcher.  */
1673      {
1674	tree fn = cp_get_callee_fndecl_nofold (stmt);
1675	if (fn && DECL_FUNCTION_VERSIONED (fn)
1676	    && (current_function_decl == NULL
1677		|| !targetm.target_option.can_inline_p (current_function_decl,
1678							fn)))
1679	  if (tree dis = get_function_version_dispatcher (fn))
1680	    {
1681	      mark_versions_used (dis);
1682	      dis = build_address (dis);
1683	      if (TREE_CODE (stmt) == CALL_EXPR)
1684		CALL_EXPR_FN (stmt) = dis;
1685	      else
1686		AGGR_INIT_EXPR_FN (stmt) = dis;
1687	    }
1688      }
1689      break;
1690
1691    case TARGET_EXPR:
1692      if (TARGET_EXPR_INITIAL (stmt)
1693	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1694	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1695	TARGET_EXPR_NO_ELIDE (stmt) = 1;
1696      break;
1697
1698    case TEMPLATE_ID_EXPR:
1699      gcc_assert (concept_check_p (stmt));
1700      /* Emit the value of the concept check.  */
1701      *stmt_p = evaluate_concept_check (stmt);
1702      walk_subtrees = 0;
1703      break;
1704
1705    case OMP_DISTRIBUTE:
1706      /* Need to explicitly instantiate copy ctors on class iterators of
1707	 composite distribute parallel for.  */
1708      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1709	{
1710	  tree *data[4] = { NULL, NULL, NULL, NULL };
1711	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1712				  find_combined_omp_for, data, NULL);
1713	  if (inner != NULL_TREE
1714	      && TREE_CODE (inner) == OMP_FOR)
1715	    {
1716	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1717		if (OMP_FOR_ORIG_DECLS (inner)
1718		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1719				  i)) == TREE_LIST
1720		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1721				     i)))
1722		  {
1723		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1724		    /* Class iterators aren't allowed on OMP_SIMD, so the only
1725		       case we need to solve is distribute parallel for.  */
1726		    gcc_assert (TREE_CODE (inner) == OMP_FOR
1727				&& data[1]);
1728		    tree orig_decl = TREE_PURPOSE (orig);
1729		    tree c, cl = NULL_TREE;
1730		    for (c = OMP_FOR_CLAUSES (inner);
1731			 c; c = OMP_CLAUSE_CHAIN (c))
1732		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1733			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1734			  && OMP_CLAUSE_DECL (c) == orig_decl)
1735			{
1736			  cl = c;
1737			  break;
1738			}
1739		    if (cl == NULL_TREE)
1740		      {
1741			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1742			     c; c = OMP_CLAUSE_CHAIN (c))
1743			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1744			      && OMP_CLAUSE_DECL (c) == orig_decl)
1745			    {
1746			      cl = c;
1747			      break;
1748			    }
1749		      }
1750		    if (cl)
1751		      {
1752			orig_decl = require_complete_type (orig_decl);
1753			tree inner_type = TREE_TYPE (orig_decl);
1754			if (orig_decl == error_mark_node)
1755			  continue;
1756			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1757			  inner_type = TREE_TYPE (inner_type);
1758
1759			while (TREE_CODE (inner_type) == ARRAY_TYPE)
1760			  inner_type = TREE_TYPE (inner_type);
1761			get_copy_ctor (inner_type, tf_warning_or_error);
1762		      }
1763		}
1764	    }
1765	}
1766      /* FALLTHRU */
1767
1768    case FOR_STMT:
1769    case WHILE_STMT:
1770    case DO_STMT:
1771    case SWITCH_STMT:
1772    case CONTINUE_STMT:
1773    case BREAK_STMT:
1774    case OMP_FOR:
1775    case OMP_SIMD:
1776    case OMP_LOOP:
1777    case OACC_LOOP:
1778    case STATEMENT_LIST:
1779      /* These cases are handled by shared code.  */
1780      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1781				 cp_genericize_r, cp_walk_subtrees);
1782      break;
1783
1784    case BIT_CAST_EXPR:
1785      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1786			    TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1787      break;
1788
1789    default:
1790      if (IS_TYPE_OR_DECL_P (stmt))
1791	*walk_subtrees = 0;
1792      break;
1793    }
1794
1795  p_set->add (*stmt_p);
1796
1797  return NULL;
1798}
1799
1800/* Lower C++ front end trees to GENERIC in T_P.  */
1801
1802static void
1803cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1804{
1805  struct cp_genericize_data wtd;
1806
1807  wtd.p_set = new hash_set<tree>;
1808  wtd.bind_expr_stack.create (0);
1809  wtd.omp_ctx = NULL;
1810  wtd.try_block = NULL_TREE;
1811  wtd.no_sanitize_p = false;
1812  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1813  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1814  delete wtd.p_set;
1815  if (sanitize_flags_p (SANITIZE_VPTR))
1816    cp_ubsan_instrument_member_accesses (t_p);
1817}
1818
1819/* If a function that should end with a return in non-void
1820   function doesn't obviously end with return, add ubsan
1821   instrumentation code to verify it at runtime.  If -fsanitize=return
1822   is not enabled, instrument __builtin_unreachable.  */
1823
1824static void
1825cp_maybe_instrument_return (tree fndecl)
1826{
1827  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1828      || DECL_CONSTRUCTOR_P (fndecl)
1829      || DECL_DESTRUCTOR_P (fndecl)
1830      || !targetm.warn_func_return (fndecl))
1831    return;
1832
1833  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1834      /* Don't add __builtin_unreachable () if not optimizing, it will not
1835	 improve any optimizations in that case, just break UB code.
1836	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1837	 UBSan covers this with ubsan_instrument_return above where sufficient
1838	 information is provided, while the __builtin_unreachable () below
1839	 if return sanitization is disabled will just result in hard to
1840	 understand runtime error without location.  */
1841      && (!optimize
1842	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1843    return;
1844
1845  tree t = DECL_SAVED_TREE (fndecl);
1846  while (t)
1847    {
1848      switch (TREE_CODE (t))
1849	{
1850	case BIND_EXPR:
1851	  t = BIND_EXPR_BODY (t);
1852	  continue;
1853	case TRY_FINALLY_EXPR:
1854	case CLEANUP_POINT_EXPR:
1855	  t = TREE_OPERAND (t, 0);
1856	  continue;
1857	case STATEMENT_LIST:
1858	  {
1859	    tree_stmt_iterator i = tsi_last (t);
1860	    while (!tsi_end_p (i))
1861	      {
1862		tree p = tsi_stmt (i);
1863		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1864		  break;
1865		tsi_prev (&i);
1866	      }
1867	    if (!tsi_end_p (i))
1868	      {
1869		t = tsi_stmt (i);
1870		continue;
1871	      }
1872	  }
1873	  break;
1874	case RETURN_EXPR:
1875	  return;
1876	default:
1877	  break;
1878	}
1879      break;
1880    }
1881  if (t == NULL_TREE)
1882    return;
1883  tree *p = &DECL_SAVED_TREE (fndecl);
1884  if (TREE_CODE (*p) == BIND_EXPR)
1885    p = &BIND_EXPR_BODY (*p);
1886
1887  location_t loc = DECL_SOURCE_LOCATION (fndecl);
1888  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1889    t = ubsan_instrument_return (loc);
1890  else
1891    {
1892      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1893      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1894    }
1895
1896  append_to_statement_list (t, p);
1897}
1898
1899void
1900cp_genericize (tree fndecl)
1901{
1902  tree t;
1903
1904  /* Fix up the types of parms passed by invisible reference.  */
1905  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1906    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1907      {
1908	/* If a function's arguments are copied to create a thunk,
1909	   then DECL_BY_REFERENCE will be set -- but the type of the
1910	   argument will be a pointer type, so we will never get
1911	   here.  */
1912	gcc_assert (!DECL_BY_REFERENCE (t));
1913	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1914	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1915	DECL_BY_REFERENCE (t) = 1;
1916	TREE_ADDRESSABLE (t) = 0;
1917	relayout_decl (t);
1918      }
1919
1920  /* Do the same for the return value.  */
1921  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1922    {
1923      t = DECL_RESULT (fndecl);
1924      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1925      DECL_BY_REFERENCE (t) = 1;
1926      TREE_ADDRESSABLE (t) = 0;
1927      relayout_decl (t);
1928      if (DECL_NAME (t))
1929	{
1930	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1931	  tree outer = outer_curly_brace_block (current_function_decl);
1932	  tree var;
1933
1934	  if (outer)
1935	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1936	      if (VAR_P (var)
1937		  && DECL_NAME (t) == DECL_NAME (var)
1938		  && DECL_HAS_VALUE_EXPR_P (var)
1939		  && DECL_VALUE_EXPR (var) == t)
1940		{
1941		  tree val = convert_from_reference (t);
1942		  SET_DECL_VALUE_EXPR (var, val);
1943		  break;
1944		}
1945	}
1946    }
1947
1948  /* If we're a clone, the body is already GIMPLE.  */
1949  if (DECL_CLONED_FUNCTION_P (fndecl))
1950    return;
1951
1952  /* Allow cp_genericize calls to be nested.  */
1953  bc_state_t save_state;
1954  save_bc_state (&save_state);
1955
1956  /* We do want to see every occurrence of the parms, so we can't just use
1957     walk_tree's hash functionality.  */
1958  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1959
1960  cp_maybe_instrument_return (fndecl);
1961
1962  /* Do everything else.  */
1963  c_genericize (fndecl);
1964  restore_bc_state (&save_state);
1965}
1966
1967/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1968   NULL if there is in fact nothing to do.  ARG2 may be null if FN
1969   actually only takes one argument.  */
1970
1971static tree
1972cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1973{
1974  tree defparm, parm, t;
1975  int i = 0;
1976  int nargs;
1977  tree *argarray;
1978
1979  if (fn == NULL)
1980    return NULL;
1981
1982  nargs = list_length (DECL_ARGUMENTS (fn));
1983  argarray = XALLOCAVEC (tree, nargs);
1984
1985  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1986  if (arg2)
1987    defparm = TREE_CHAIN (defparm);
1988
1989  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1990  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1991    {
1992      tree inner_type = TREE_TYPE (arg1);
1993      tree start1, end1, p1;
1994      tree start2 = NULL, p2 = NULL;
1995      tree ret = NULL, lab;
1996
1997      start1 = arg1;
1998      start2 = arg2;
1999      do
2000	{
2001	  inner_type = TREE_TYPE (inner_type);
2002	  start1 = build4 (ARRAY_REF, inner_type, start1,
2003			   size_zero_node, NULL, NULL);
2004	  if (arg2)
2005	    start2 = build4 (ARRAY_REF, inner_type, start2,
2006			     size_zero_node, NULL, NULL);
2007	}
2008      while (TREE_CODE (inner_type) == ARRAY_TYPE);
2009      start1 = build_fold_addr_expr_loc (input_location, start1);
2010      if (arg2)
2011	start2 = build_fold_addr_expr_loc (input_location, start2);
2012
2013      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2014      end1 = fold_build_pointer_plus (start1, end1);
2015
2016      p1 = create_tmp_var (TREE_TYPE (start1));
2017      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2018      append_to_statement_list (t, &ret);
2019
2020      if (arg2)
2021	{
2022	  p2 = create_tmp_var (TREE_TYPE (start2));
2023	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2024	  append_to_statement_list (t, &ret);
2025	}
2026
2027      lab = create_artificial_label (input_location);
2028      t = build1 (LABEL_EXPR, void_type_node, lab);
2029      append_to_statement_list (t, &ret);
2030
2031      argarray[i++] = p1;
2032      if (arg2)
2033	argarray[i++] = p2;
2034      /* Handle default arguments.  */
2035      for (parm = defparm; parm && parm != void_list_node;
2036	   parm = TREE_CHAIN (parm), i++)
2037	argarray[i] = convert_default_arg (TREE_VALUE (parm),
2038					   TREE_PURPOSE (parm), fn,
2039					   i - is_method, tf_warning_or_error);
2040      t = build_call_a (fn, i, argarray);
2041      t = fold_convert (void_type_node, t);
2042      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2043      append_to_statement_list (t, &ret);
2044
2045      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2046      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2047      append_to_statement_list (t, &ret);
2048
2049      if (arg2)
2050	{
2051	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2052	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2053	  append_to_statement_list (t, &ret);
2054	}
2055
2056      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2057      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2058      append_to_statement_list (t, &ret);
2059
2060      return ret;
2061    }
2062  else
2063    {
2064      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2065      if (arg2)
2066	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2067      /* Handle default arguments.  */
2068      for (parm = defparm; parm && parm != void_list_node;
2069	   parm = TREE_CHAIN (parm), i++)
2070	argarray[i] = convert_default_arg (TREE_VALUE (parm),
2071					   TREE_PURPOSE (parm), fn,
2072					   i - is_method, tf_warning_or_error);
2073      t = build_call_a (fn, i, argarray);
2074      t = fold_convert (void_type_node, t);
2075      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2076    }
2077}
2078
2079/* Return code to initialize DECL with its default constructor, or
2080   NULL if there's nothing to do.  */
2081
2082tree
2083cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2084{
2085  tree info = CP_OMP_CLAUSE_INFO (clause);
2086  tree ret = NULL;
2087
2088  if (info)
2089    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2090
2091  return ret;
2092}
2093
2094/* Return code to initialize DST with a copy constructor from SRC.  */
2095
2096tree
2097cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2098{
2099  tree info = CP_OMP_CLAUSE_INFO (clause);
2100  tree ret = NULL;
2101
2102  if (info)
2103    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2104  if (ret == NULL)
2105    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2106
2107  return ret;
2108}
2109
2110/* Similarly, except use an assignment operator instead.  */
2111
2112tree
2113cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2114{
2115  tree info = CP_OMP_CLAUSE_INFO (clause);
2116  tree ret = NULL;
2117
2118  if (info)
2119    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2120  if (ret == NULL)
2121    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2122
2123  return ret;
2124}
2125
2126/* Return code to destroy DECL.  */
2127
2128tree
2129cxx_omp_clause_dtor (tree clause, tree decl)
2130{
2131  tree info = CP_OMP_CLAUSE_INFO (clause);
2132  tree ret = NULL;
2133
2134  if (info)
2135    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2136
2137  return ret;
2138}
2139
2140/* True if OpenMP should privatize what this DECL points to rather
2141   than the DECL itself.  */
2142
2143bool
2144cxx_omp_privatize_by_reference (const_tree decl)
2145{
2146  return (TYPE_REF_P (TREE_TYPE (decl))
2147	  || is_invisiref_parm (decl));
2148}
2149
2150/* Return true if DECL is const qualified var having no mutable member.  */
2151bool
2152cxx_omp_const_qual_no_mutable (tree decl)
2153{
2154  tree type = TREE_TYPE (decl);
2155  if (TYPE_REF_P (type))
2156    {
2157      if (!is_invisiref_parm (decl))
2158	return false;
2159      type = TREE_TYPE (type);
2160
2161      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2162	{
2163	  /* NVR doesn't preserve const qualification of the
2164	     variable's type.  */
2165	  tree outer = outer_curly_brace_block (current_function_decl);
2166	  tree var;
2167
2168	  if (outer)
2169	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2170	      if (VAR_P (var)
2171		  && DECL_NAME (decl) == DECL_NAME (var)
2172		  && (TYPE_MAIN_VARIANT (type)
2173		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2174		{
2175		  if (TYPE_READONLY (TREE_TYPE (var)))
2176		    type = TREE_TYPE (var);
2177		  break;
2178		}
2179	}
2180    }
2181
2182  if (type == error_mark_node)
2183    return false;
2184
2185  /* Variables with const-qualified type having no mutable member
2186     are predetermined shared.  */
2187  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2188    return true;
2189
2190  return false;
2191}
2192
2193/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2194   of DECL is predetermined.  */
2195
2196enum omp_clause_default_kind
2197cxx_omp_predetermined_sharing_1 (tree decl)
2198{
2199  /* Static data members are predetermined shared.  */
2200  if (TREE_STATIC (decl))
2201    {
2202      tree ctx = CP_DECL_CONTEXT (decl);
2203      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2204	return OMP_CLAUSE_DEFAULT_SHARED;
2205
2206      if (c_omp_predefined_variable (decl))
2207	return OMP_CLAUSE_DEFAULT_SHARED;
2208    }
2209
2210  /* this may not be specified in data-sharing clauses, still we need
2211     to predetermined it firstprivate.  */
2212  if (decl == current_class_ptr)
2213    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2214
2215  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2216}
2217
2218/* Likewise, but also include the artificial vars.  We don't want to
2219   disallow the artificial vars being mentioned in explicit clauses,
2220   as we use artificial vars e.g. for loop constructs with random
2221   access iterators other than pointers, but during gimplification
2222   we want to treat them as predetermined.  */
2223
2224enum omp_clause_default_kind
2225cxx_omp_predetermined_sharing (tree decl)
2226{
2227  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2228  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2229    return ret;
2230
2231  /* Predetermine artificial variables holding integral values, those
2232     are usually result of gimplify_one_sizepos or SAVE_EXPR
2233     gimplification.  */
2234  if (VAR_P (decl)
2235      && DECL_ARTIFICIAL (decl)
2236      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2237      && !(DECL_LANG_SPECIFIC (decl)
2238	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2239    return OMP_CLAUSE_DEFAULT_SHARED;
2240
2241  /* Similarly for typeinfo symbols.  */
2242  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2243    return OMP_CLAUSE_DEFAULT_SHARED;
2244
2245  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2246}
2247
2248enum omp_clause_defaultmap_kind
2249cxx_omp_predetermined_mapping (tree decl)
2250{
2251  /* Predetermine artificial variables holding integral values, those
2252     are usually result of gimplify_one_sizepos or SAVE_EXPR
2253     gimplification.  */
2254  if (VAR_P (decl)
2255      && DECL_ARTIFICIAL (decl)
2256      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2257      && !(DECL_LANG_SPECIFIC (decl)
2258	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2259    return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2260
2261  if (c_omp_predefined_variable (decl))
2262    return OMP_CLAUSE_DEFAULTMAP_TO;
2263
2264  return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2265}
2266
2267/* Finalize an implicitly determined clause.  */
2268
2269void
2270cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2271{
2272  tree decl, inner_type;
2273  bool make_shared = false;
2274
2275  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2276      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2277      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2278	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2279    return;
2280
2281  decl = OMP_CLAUSE_DECL (c);
2282  decl = require_complete_type (decl);
2283  inner_type = TREE_TYPE (decl);
2284  if (decl == error_mark_node)
2285    make_shared = true;
2286  else if (TYPE_REF_P (TREE_TYPE (decl)))
2287    inner_type = TREE_TYPE (inner_type);
2288
2289  /* We're interested in the base element, not arrays.  */
2290  while (TREE_CODE (inner_type) == ARRAY_TYPE)
2291    inner_type = TREE_TYPE (inner_type);
2292
2293  /* Check for special function availability by building a call to one.
2294     Save the results, because later we won't be in the right context
2295     for making these queries.  */
2296  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2297  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2298  if (!make_shared
2299      && CLASS_TYPE_P (inner_type)
2300      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2301				     true))
2302    make_shared = true;
2303
2304  if (make_shared)
2305    {
2306      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2307      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2308      OMP_CLAUSE_SHARED_READONLY (c) = 0;
2309    }
2310}
2311
2312/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2313   disregarded in OpenMP construct, because it is going to be
2314   remapped during OpenMP lowering.  SHARED is true if DECL
2315   is going to be shared, false if it is going to be privatized.  */
2316
2317bool
2318cxx_omp_disregard_value_expr (tree decl, bool shared)
2319{
2320  if (shared)
2321    return false;
2322  if (VAR_P (decl)
2323      && DECL_HAS_VALUE_EXPR_P (decl)
2324      && DECL_ARTIFICIAL (decl)
2325      && DECL_LANG_SPECIFIC (decl)
2326      && DECL_OMP_PRIVATIZED_MEMBER (decl))
2327    return true;
2328  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2329    return true;
2330  return false;
2331}
2332
2333/* Fold expression X which is used as an rvalue if RVAL is true.  */
2334
2335tree
2336cp_fold_maybe_rvalue (tree x, bool rval)
2337{
2338  while (true)
2339    {
2340      x = cp_fold (x);
2341      if (rval)
2342	x = mark_rvalue_use (x);
2343      if (rval && DECL_P (x)
2344	  && !TYPE_REF_P (TREE_TYPE (x)))
2345	{
2346	  tree v = decl_constant_value (x);
2347	  if (v != x && v != error_mark_node)
2348	    {
2349	      x = v;
2350	      continue;
2351	    }
2352	}
2353      break;
2354    }
2355  return x;
2356}
2357
2358/* Fold expression X which is used as an rvalue.  */
2359
2360tree
2361cp_fold_rvalue (tree x)
2362{
2363  return cp_fold_maybe_rvalue (x, true);
2364}
2365
2366/* Perform folding on expression X.  */
2367
2368tree
2369cp_fully_fold (tree x)
2370{
2371  if (processing_template_decl)
2372    return x;
2373  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2374     have to call both.  */
2375  if (cxx_dialect >= cxx11)
2376    {
2377      x = maybe_constant_value (x);
2378      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2379	 a TARGET_EXPR; undo that here.  */
2380      if (TREE_CODE (x) == TARGET_EXPR)
2381	x = TARGET_EXPR_INITIAL (x);
2382      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2383	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2384	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2385	x = TREE_OPERAND (x, 0);
2386    }
2387  return cp_fold_rvalue (x);
2388}
2389
2390/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2391   in some cases.  */
2392
2393tree
2394cp_fully_fold_init (tree x)
2395{
2396  if (processing_template_decl)
2397    return x;
2398  x = cp_fully_fold (x);
2399  cp_fold_data data (/*genericize*/false);
2400  cp_walk_tree (&x, cp_fold_r, &data, NULL);
2401  return x;
2402}
2403
2404/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2405   and certain changes are made to the folding done.  Or should be (FIXME).  We
2406   never touch maybe_const, as it is only used for the C front-end
2407   C_MAYBE_CONST_EXPR.  */
2408
2409tree
2410c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2411{
2412  return cp_fold_maybe_rvalue (x, !lval);
2413}
2414
2415static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2416
2417/* Dispose of the whole FOLD_CACHE.  */
2418
2419void
2420clear_fold_cache (void)
2421{
2422  if (fold_cache != NULL)
2423    fold_cache->empty ();
2424}
2425
2426/*  This function tries to fold an expression X.
2427    To avoid combinatorial explosion, folding results are kept in fold_cache.
2428    If X is invalid, we don't fold at all.
2429    For performance reasons we don't cache expressions representing a
2430    declaration or constant.
2431    Function returns X or its folded variant.  */
2432
2433static tree
2434cp_fold (tree x)
2435{
2436  tree op0, op1, op2, op3;
2437  tree org_x = x, r = NULL_TREE;
2438  enum tree_code code;
2439  location_t loc;
2440  bool rval_ops = true;
2441
2442  if (!x || x == error_mark_node)
2443    return x;
2444
2445  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2446    return x;
2447
2448  /* Don't bother to cache DECLs or constants.  */
2449  if (DECL_P (x) || CONSTANT_CLASS_P (x))
2450    return x;
2451
2452  if (fold_cache == NULL)
2453    fold_cache = hash_map<tree, tree>::create_ggc (101);
2454
2455  if (tree *cached = fold_cache->get (x))
2456    return *cached;
2457
2458  uid_sensitive_constexpr_evaluation_checker c;
2459
2460  code = TREE_CODE (x);
2461  switch (code)
2462    {
2463    case CLEANUP_POINT_EXPR:
2464      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2465	 effects.  */
2466      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2467      if (!TREE_SIDE_EFFECTS (r))
2468	x = r;
2469      break;
2470
2471    case SIZEOF_EXPR:
2472      x = fold_sizeof_expr (x);
2473      break;
2474
2475    case VIEW_CONVERT_EXPR:
2476      rval_ops = false;
2477      /* FALLTHRU */
2478    case CONVERT_EXPR:
2479    case NOP_EXPR:
2480    case NON_LVALUE_EXPR:
2481
2482      if (VOID_TYPE_P (TREE_TYPE (x)))
2483	{
2484	  /* This is just to make sure we don't end up with casts to
2485	     void from error_mark_node.  If we just return x, then
2486	     cp_fold_r might fold the operand into error_mark_node and
2487	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2488	     during gimplification doesn't like such casts.
2489	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2490	     folding of the operand should be in the caches and if in cp_fold_r
2491	     it will modify it in place.  */
2492	  op0 = cp_fold (TREE_OPERAND (x, 0));
2493	  if (op0 == error_mark_node)
2494	    x = error_mark_node;
2495	  break;
2496	}
2497
2498      loc = EXPR_LOCATION (x);
2499      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2500
2501      if (code == CONVERT_EXPR
2502	  && SCALAR_TYPE_P (TREE_TYPE (x))
2503	  && op0 != void_node)
2504	/* During parsing we used convert_to_*_nofold; re-convert now using the
2505	   folding variants, since fold() doesn't do those transformations.  */
2506	x = fold (convert (TREE_TYPE (x), op0));
2507      else if (op0 != TREE_OPERAND (x, 0))
2508	{
2509	  if (op0 == error_mark_node)
2510	    x = error_mark_node;
2511	  else
2512	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2513	}
2514      else
2515	x = fold (x);
2516
2517      /* Conversion of an out-of-range value has implementation-defined
2518	 behavior; the language considers it different from arithmetic
2519	 overflow, which is undefined.  */
2520      if (TREE_CODE (op0) == INTEGER_CST
2521	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2522	TREE_OVERFLOW (x) = false;
2523
2524      break;
2525
2526    case INDIRECT_REF:
2527      /* We don't need the decltype(auto) obfuscation anymore.  */
2528      if (REF_PARENTHESIZED_P (x))
2529	{
2530	  tree p = maybe_undo_parenthesized_ref (x);
2531	  if (p != x)
2532	    return cp_fold (p);
2533	}
2534      goto unary;
2535
2536    case ADDR_EXPR:
2537      loc = EXPR_LOCATION (x);
2538      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2539
2540      /* Cope with user tricks that amount to offsetof.  */
2541      if (op0 != error_mark_node
2542	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2543	{
2544	  tree val = get_base_address (op0);
2545	  if (val
2546	      && INDIRECT_REF_P (val)
2547	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2548	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2549	    {
2550	      val = TREE_OPERAND (val, 0);
2551	      STRIP_NOPS (val);
2552	      val = maybe_constant_value (val);
2553	      if (TREE_CODE (val) == INTEGER_CST)
2554		return fold_offsetof (op0, TREE_TYPE (x));
2555	    }
2556	}
2557      goto finish_unary;
2558
2559    case REALPART_EXPR:
2560    case IMAGPART_EXPR:
2561      rval_ops = false;
2562      /* FALLTHRU */
2563    case CONJ_EXPR:
2564    case FIX_TRUNC_EXPR:
2565    case FLOAT_EXPR:
2566    case NEGATE_EXPR:
2567    case ABS_EXPR:
2568    case ABSU_EXPR:
2569    case BIT_NOT_EXPR:
2570    case TRUTH_NOT_EXPR:
2571    case FIXED_CONVERT_EXPR:
2572    unary:
2573
2574      loc = EXPR_LOCATION (x);
2575      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2576
2577    finish_unary:
2578      if (op0 != TREE_OPERAND (x, 0))
2579	{
2580	  if (op0 == error_mark_node)
2581	    x = error_mark_node;
2582	  else
2583	    {
2584	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2585	      if (code == INDIRECT_REF
2586		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2587		{
2588		  TREE_READONLY (x) = TREE_READONLY (org_x);
2589		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2590		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2591		}
2592	    }
2593	}
2594      else
2595	x = fold (x);
2596
2597      gcc_assert (TREE_CODE (x) != COND_EXPR
2598		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2599      break;
2600
2601    case UNARY_PLUS_EXPR:
2602      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2603      if (op0 == error_mark_node)
2604	x = error_mark_node;
2605      else
2606	x = fold_convert (TREE_TYPE (x), op0);
2607      break;
2608
2609    case POSTDECREMENT_EXPR:
2610    case POSTINCREMENT_EXPR:
2611    case INIT_EXPR:
2612    case PREDECREMENT_EXPR:
2613    case PREINCREMENT_EXPR:
2614    case COMPOUND_EXPR:
2615    case MODIFY_EXPR:
2616      rval_ops = false;
2617      /* FALLTHRU */
2618    case POINTER_PLUS_EXPR:
2619    case PLUS_EXPR:
2620    case POINTER_DIFF_EXPR:
2621    case MINUS_EXPR:
2622    case MULT_EXPR:
2623    case TRUNC_DIV_EXPR:
2624    case CEIL_DIV_EXPR:
2625    case FLOOR_DIV_EXPR:
2626    case ROUND_DIV_EXPR:
2627    case TRUNC_MOD_EXPR:
2628    case CEIL_MOD_EXPR:
2629    case ROUND_MOD_EXPR:
2630    case RDIV_EXPR:
2631    case EXACT_DIV_EXPR:
2632    case MIN_EXPR:
2633    case MAX_EXPR:
2634    case LSHIFT_EXPR:
2635    case RSHIFT_EXPR:
2636    case LROTATE_EXPR:
2637    case RROTATE_EXPR:
2638    case BIT_AND_EXPR:
2639    case BIT_IOR_EXPR:
2640    case BIT_XOR_EXPR:
2641    case TRUTH_AND_EXPR:
2642    case TRUTH_ANDIF_EXPR:
2643    case TRUTH_OR_EXPR:
2644    case TRUTH_ORIF_EXPR:
2645    case TRUTH_XOR_EXPR:
2646    case LT_EXPR: case LE_EXPR:
2647    case GT_EXPR: case GE_EXPR:
2648    case EQ_EXPR: case NE_EXPR:
2649    case UNORDERED_EXPR: case ORDERED_EXPR:
2650    case UNLT_EXPR: case UNLE_EXPR:
2651    case UNGT_EXPR: case UNGE_EXPR:
2652    case UNEQ_EXPR: case LTGT_EXPR:
2653    case RANGE_EXPR: case COMPLEX_EXPR:
2654
2655      loc = EXPR_LOCATION (x);
2656      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2657      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2658
2659      /* decltype(nullptr) has only one value, so optimize away all comparisons
2660	 with that type right away, keeping them in the IL causes troubles for
2661	 various optimizations.  */
2662      if (COMPARISON_CLASS_P (org_x)
2663	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2664	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2665	{
2666	  switch (code)
2667	    {
2668	    case EQ_EXPR:
2669	      x = constant_boolean_node (true, TREE_TYPE (x));
2670	      break;
2671	    case NE_EXPR:
2672	      x = constant_boolean_node (false, TREE_TYPE (x));
2673	      break;
2674	    default:
2675	      gcc_unreachable ();
2676	    }
2677	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2678					op0, op1);
2679	}
2680
2681      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2682	{
2683	  if (op0 == error_mark_node || op1 == error_mark_node)
2684	    x = error_mark_node;
2685	  else
2686	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2687	}
2688      else
2689	x = fold (x);
2690
2691      /* This is only needed for -Wnonnull-compare and only if
2692	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2693	 generation, we do it always.  */
2694      if (COMPARISON_CLASS_P (org_x))
2695	{
2696	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2697	    ;
2698	  else if (COMPARISON_CLASS_P (x))
2699	    {
2700	      if (warn_nonnull_compare
2701		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2702		suppress_warning (x, OPT_Wnonnull_compare);
2703	    }
2704	  /* Otherwise give up on optimizing these, let GIMPLE folders
2705	     optimize those later on.  */
2706	  else if (op0 != TREE_OPERAND (org_x, 0)
2707		   || op1 != TREE_OPERAND (org_x, 1))
2708	    {
2709	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2710	      if (warn_nonnull_compare
2711		  && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2712		suppress_warning (x, OPT_Wnonnull_compare);
2713	    }
2714	  else
2715	    x = org_x;
2716	}
2717
2718      break;
2719
2720    case VEC_COND_EXPR:
2721    case COND_EXPR:
2722      loc = EXPR_LOCATION (x);
2723      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2724      op1 = cp_fold (TREE_OPERAND (x, 1));
2725      op2 = cp_fold (TREE_OPERAND (x, 2));
2726
2727      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2728	{
2729	  warning_sentinel s (warn_int_in_bool_context);
2730	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2731	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2732	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2733	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2734	}
2735      else if (VOID_TYPE_P (TREE_TYPE (x)))
2736	{
2737	  if (TREE_CODE (op0) == INTEGER_CST)
2738	    {
2739	      /* If the condition is constant, fold can fold away
2740		 the COND_EXPR.  If some statement-level uses of COND_EXPR
2741		 have one of the branches NULL, avoid folding crash.  */
2742	      if (!op1)
2743		op1 = build_empty_stmt (loc);
2744	      if (!op2)
2745		op2 = build_empty_stmt (loc);
2746	    }
2747	  else
2748	    {
2749	      /* Otherwise, don't bother folding a void condition, since
2750		 it can't produce a constant value.  */
2751	      if (op0 != TREE_OPERAND (x, 0)
2752		  || op1 != TREE_OPERAND (x, 1)
2753		  || op2 != TREE_OPERAND (x, 2))
2754		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2755	      break;
2756	    }
2757	}
2758
2759      if (op0 != TREE_OPERAND (x, 0)
2760	  || op1 != TREE_OPERAND (x, 1)
2761	  || op2 != TREE_OPERAND (x, 2))
2762	{
2763	  if (op0 == error_mark_node
2764	      || op1 == error_mark_node
2765	      || op2 == error_mark_node)
2766	    x = error_mark_node;
2767	  else
2768	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2769	}
2770      else
2771	x = fold (x);
2772
2773      /* A COND_EXPR might have incompatible types in branches if one or both
2774	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2775      if (TREE_CODE (x) != code
2776	  && x != error_mark_node
2777	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2778	x = fold_convert (TREE_TYPE (org_x), x);
2779
2780      break;
2781
2782    case CALL_EXPR:
2783      {
2784	tree callee = get_callee_fndecl (x);
2785
2786	/* "Inline" calls to std::move/forward and other cast-like functions
2787	   by simply folding them into a corresponding cast to their return
2788	   type.  This is cheaper than relying on the middle end to do so, and
2789	   also means we avoid generating useless debug info for them at all.
2790
2791	   At this point the argument has already been converted into a
2792	   reference, so it suffices to use a NOP_EXPR to express the
2793	   cast.  */
2794	if ((OPTION_SET_P (flag_fold_simple_inlines)
2795	     ? flag_fold_simple_inlines
2796	     : !flag_no_inline)
2797	    && call_expr_nargs (x) == 1
2798	    && decl_in_std_namespace_p (callee)
2799	    && DECL_NAME (callee) != NULL_TREE
2800	    && (id_equal (DECL_NAME (callee), "move")
2801		|| id_equal (DECL_NAME (callee), "forward")
2802		|| id_equal (DECL_NAME (callee), "addressof")
2803		/* This addressof equivalent is used heavily in libstdc++.  */
2804		|| id_equal (DECL_NAME (callee), "__addressof")
2805		|| id_equal (DECL_NAME (callee), "as_const")))
2806	  {
2807	    r = CALL_EXPR_ARG (x, 0);
2808	    /* Check that the return and argument types are sane before
2809	       folding.  */
2810	    if (INDIRECT_TYPE_P (TREE_TYPE (x))
2811		&& INDIRECT_TYPE_P (TREE_TYPE (r)))
2812	      {
2813		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2814		  r = build_nop (TREE_TYPE (x), r);
2815		x = cp_fold (r);
2816		break;
2817	      }
2818	  }
2819
2820	int sv = optimize, nw = sv;
2821
2822	/* Some built-in function calls will be evaluated at compile-time in
2823	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2824	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2825	if (callee && fndecl_built_in_p (callee) && !optimize
2826	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2827	    && current_function_decl
2828	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2829	  nw = 1;
2830
2831	if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2832	  {
2833	    switch (DECL_FE_FUNCTION_CODE (callee))
2834	      {
2835		/* Defer folding __builtin_is_constant_evaluated.  */
2836	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2837		break;
2838	      case CP_BUILT_IN_SOURCE_LOCATION:
2839		x = fold_builtin_source_location (EXPR_LOCATION (x));
2840		break;
2841	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2842	        x = fold_builtin_is_corresponding_member
2843			(EXPR_LOCATION (x), call_expr_nargs (x),
2844			 &CALL_EXPR_ARG (x, 0));
2845		break;
2846	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2847                x = fold_builtin_is_pointer_inverconvertible_with_class
2848			(EXPR_LOCATION (x), call_expr_nargs (x),
2849			 &CALL_EXPR_ARG (x, 0));
2850		break;
2851	      default:
2852		break;
2853	      }
2854	    break;
2855	  }
2856
2857	if (callee
2858	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2859				  BUILT_IN_FRONTEND))
2860	  {
2861	    x = fold_builtin_source_location (EXPR_LOCATION (x));
2862	    break;
2863	  }
2864
2865	bool changed = false;
2866	int m = call_expr_nargs (x);
2867	for (int i = 0; i < m; i++)
2868	  {
2869	    r = cp_fold (CALL_EXPR_ARG (x, i));
2870	    if (r != CALL_EXPR_ARG (x, i))
2871	      {
2872		if (r == error_mark_node)
2873		  {
2874		    x = error_mark_node;
2875		    break;
2876		  }
2877		if (!changed)
2878		  x = copy_node (x);
2879		CALL_EXPR_ARG (x, i) = r;
2880		changed = true;
2881	      }
2882	  }
2883	if (x == error_mark_node)
2884	  break;
2885
2886	optimize = nw;
2887	r = fold (x);
2888	optimize = sv;
2889
2890	if (TREE_CODE (r) != CALL_EXPR)
2891	  {
2892	    x = cp_fold (r);
2893	    break;
2894	  }
2895
2896	optimize = nw;
2897
2898	/* Invoke maybe_constant_value for functions declared
2899	   constexpr and not called with AGGR_INIT_EXPRs.
2900	   TODO:
2901	   Do constexpr expansion of expressions where the call itself is not
2902	   constant, but the call followed by an INDIRECT_REF is.  */
2903	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2904	    && !flag_no_inline)
2905	  r = maybe_constant_value (x);
2906	optimize = sv;
2907
2908        if (TREE_CODE (r) != CALL_EXPR)
2909	  {
2910	    if (DECL_CONSTRUCTOR_P (callee))
2911	      {
2912		loc = EXPR_LOCATION (x);
2913		tree s = build_fold_indirect_ref_loc (loc,
2914						      CALL_EXPR_ARG (x, 0));
2915		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2916	      }
2917	    x = r;
2918	    break;
2919	  }
2920
2921	break;
2922      }
2923
2924    case CONSTRUCTOR:
2925      {
2926	unsigned i;
2927	constructor_elt *p;
2928	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2929	vec<constructor_elt, va_gc> *nelts = NULL;
2930	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2931	  {
2932	    tree op = cp_fold (p->value);
2933	    if (op != p->value)
2934	      {
2935		if (op == error_mark_node)
2936		  {
2937		    x = error_mark_node;
2938		    vec_free (nelts);
2939		    break;
2940		  }
2941		if (nelts == NULL)
2942		  nelts = elts->copy ();
2943		(*nelts)[i].value = op;
2944	      }
2945	  }
2946	if (nelts)
2947	  {
2948	    x = build_constructor (TREE_TYPE (x), nelts);
2949	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2950	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2951	  }
2952	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2953	  x = fold (x);
2954	break;
2955      }
2956    case TREE_VEC:
2957      {
2958	bool changed = false;
2959	int n = TREE_VEC_LENGTH (x);
2960
2961	for (int i = 0; i < n; i++)
2962	  {
2963	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2964	    if (op != TREE_VEC_ELT (x, i))
2965	      {
2966		if (!changed)
2967		  x = copy_node (x);
2968		TREE_VEC_ELT (x, i) = op;
2969		changed = true;
2970	      }
2971	  }
2972      }
2973
2974      break;
2975
2976    case ARRAY_REF:
2977    case ARRAY_RANGE_REF:
2978
2979      loc = EXPR_LOCATION (x);
2980      op0 = cp_fold (TREE_OPERAND (x, 0));
2981      op1 = cp_fold (TREE_OPERAND (x, 1));
2982      op2 = cp_fold (TREE_OPERAND (x, 2));
2983      op3 = cp_fold (TREE_OPERAND (x, 3));
2984
2985      if (op0 != TREE_OPERAND (x, 0)
2986	  || op1 != TREE_OPERAND (x, 1)
2987	  || op2 != TREE_OPERAND (x, 2)
2988	  || op3 != TREE_OPERAND (x, 3))
2989	{
2990	  if (op0 == error_mark_node
2991	      || op1 == error_mark_node
2992	      || op2 == error_mark_node
2993	      || op3 == error_mark_node)
2994	    x = error_mark_node;
2995	  else
2996	    {
2997	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2998	      TREE_READONLY (x) = TREE_READONLY (org_x);
2999	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3000	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3001	    }
3002	}
3003
3004      x = fold (x);
3005      break;
3006
3007    case SAVE_EXPR:
3008      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3009	 folding, evaluates to an invariant.  In that case no need to wrap
3010	 this folded tree with a SAVE_EXPR.  */
3011      r = cp_fold (TREE_OPERAND (x, 0));
3012      if (tree_invariant_p (r))
3013	x = r;
3014      break;
3015
3016    case REQUIRES_EXPR:
3017      x = evaluate_requires_expr (x);
3018      break;
3019
3020    default:
3021      return org_x;
3022    }
3023
3024  if (EXPR_P (x) && TREE_CODE (x) == code)
3025    {
3026      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3027      copy_warning (x, org_x);
3028    }
3029
3030  if (!c.evaluation_restricted_p ())
3031    {
3032      fold_cache->put (org_x, x);
3033      /* Prevent that we try to fold an already folded result again.  */
3034      if (x != org_x)
3035	fold_cache->put (x, x);
3036    }
3037
3038  return x;
3039}
3040
3041/* Look up either "hot" or "cold" in attribute list LIST.  */
3042
3043tree
3044lookup_hotness_attribute (tree list)
3045{
3046  for (; list; list = TREE_CHAIN (list))
3047    {
3048      tree name = get_attribute_name (list);
3049      if (is_attribute_p ("hot", name)
3050	  || is_attribute_p ("cold", name)
3051	  || is_attribute_p ("likely", name)
3052	  || is_attribute_p ("unlikely", name))
3053	break;
3054    }
3055  return list;
3056}
3057
3058/* Remove both "hot" and "cold" attributes from LIST.  */
3059
3060static tree
3061remove_hotness_attribute (tree list)
3062{
3063  list = remove_attribute ("hot", list);
3064  list = remove_attribute ("cold", list);
3065  list = remove_attribute ("likely", list);
3066  list = remove_attribute ("unlikely", list);
3067  return list;
3068}
3069
3070/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3071   PREDICT_EXPR.  */
3072
3073tree
3074process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3075{
3076  if (std_attrs == error_mark_node)
3077    return std_attrs;
3078  if (tree attr = lookup_hotness_attribute (std_attrs))
3079    {
3080      tree name = get_attribute_name (attr);
3081      bool hot = (is_attribute_p ("hot", name)
3082		  || is_attribute_p ("likely", name));
3083      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3084				      hot ? TAKEN : NOT_TAKEN);
3085      SET_EXPR_LOCATION (pred, attrs_loc);
3086      add_stmt (pred);
3087      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3088	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3089		 get_attribute_name (other), name);
3090      std_attrs = remove_hotness_attribute (std_attrs);
3091    }
3092  return std_attrs;
3093}
3094
3095/* Helper of fold_builtin_source_location, return the
3096   std::source_location::__impl type after performing verification
3097   on it.  LOC is used for reporting any errors.  */
3098
3099static tree
3100get_source_location_impl_type (location_t loc)
3101{
3102  tree name = get_identifier ("source_location");
3103  tree decl = lookup_qualified_name (std_node, name);
3104  if (TREE_CODE (decl) != TYPE_DECL)
3105    {
3106      auto_diagnostic_group d;
3107      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3108	qualified_name_lookup_error (std_node, name, decl, loc);
3109      else
3110	error_at (loc, "%qD is not a type", decl);
3111      return error_mark_node;
3112    }
3113  name = get_identifier ("__impl");
3114  tree type = TREE_TYPE (decl);
3115  decl = lookup_qualified_name (type, name);
3116  if (TREE_CODE (decl) != TYPE_DECL)
3117    {
3118      auto_diagnostic_group d;
3119      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3120	qualified_name_lookup_error (type, name, decl, loc);
3121      else
3122	error_at (loc, "%qD is not a type", decl);
3123      return error_mark_node;
3124    }
3125  type = TREE_TYPE (decl);
3126  if (TREE_CODE (type) != RECORD_TYPE)
3127    {
3128      error_at (loc, "%qD is not a class type", decl);
3129      return error_mark_node;
3130    }
3131
3132  int cnt = 0;
3133  for (tree field = TYPE_FIELDS (type);
3134       (field = next_initializable_field (field)) != NULL_TREE;
3135       field = DECL_CHAIN (field))
3136    {
3137      if (DECL_NAME (field) != NULL_TREE)
3138	{
3139	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3140	  if (strcmp (n, "_M_file_name") == 0
3141	      || strcmp (n, "_M_function_name") == 0)
3142	    {
3143	      if (TREE_TYPE (field) != const_string_type_node)
3144		{
3145		  error_at (loc, "%qD does not have %<const char *%> type",
3146			    field);
3147		  return error_mark_node;
3148		}
3149	      cnt++;
3150	      continue;
3151	    }
3152	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3153	    {
3154	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3155		{
3156		  error_at (loc, "%qD does not have integral type", field);
3157		  return error_mark_node;
3158		}
3159	      cnt++;
3160	      continue;
3161	    }
3162	}
3163      cnt = 0;
3164      break;
3165    }
3166  if (cnt != 4)
3167    {
3168      error_at (loc, "%<std::source_location::__impl%> does not contain only "
3169		     "non-static data members %<_M_file_name%>, "
3170		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3171      return error_mark_node;
3172    }
3173  return build_qualified_type (type, TYPE_QUAL_CONST);
3174}
3175
3176/* Type for source_location_table hash_set.  */
3177struct GTY((for_user)) source_location_table_entry {
3178  location_t loc;
3179  unsigned uid;
3180  tree var;
3181};
3182
3183/* Traits class for function start hash maps below.  */
3184
3185struct source_location_table_entry_hash
3186  : ggc_remove <source_location_table_entry>
3187{
3188  typedef source_location_table_entry value_type;
3189  typedef source_location_table_entry compare_type;
3190
3191  static hashval_t
3192  hash (const source_location_table_entry &ref)
3193  {
3194    inchash::hash hstate (0);
3195    hstate.add_int (ref.loc);
3196    hstate.add_int (ref.uid);
3197    return hstate.end ();
3198  }
3199
3200  static bool
3201  equal (const source_location_table_entry &ref1,
3202	 const source_location_table_entry &ref2)
3203  {
3204    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3205  }
3206
3207  static void
3208  mark_deleted (source_location_table_entry &ref)
3209  {
3210    ref.loc = UNKNOWN_LOCATION;
3211    ref.uid = -1U;
3212    ref.var = NULL_TREE;
3213  }
3214
3215  static const bool empty_zero_p = true;
3216
3217  static void
3218  mark_empty (source_location_table_entry &ref)
3219  {
3220    ref.loc = UNKNOWN_LOCATION;
3221    ref.uid = 0;
3222    ref.var = NULL_TREE;
3223  }
3224
3225  static bool
3226  is_deleted (const source_location_table_entry &ref)
3227  {
3228    return (ref.loc == UNKNOWN_LOCATION
3229	    && ref.uid == -1U
3230	    && ref.var == NULL_TREE);
3231  }
3232
3233  static bool
3234  is_empty (const source_location_table_entry &ref)
3235  {
3236    return (ref.loc == UNKNOWN_LOCATION
3237	    && ref.uid == 0
3238	    && ref.var == NULL_TREE);
3239  }
3240
3241  static void
3242  pch_nx (source_location_table_entry &p)
3243  {
3244    extern void gt_pch_nx (source_location_table_entry &);
3245    gt_pch_nx (p);
3246  }
3247
3248  static void
3249  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3250  {
3251    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3252			   void *);
3253    gt_pch_nx (&p, op, cookie);
3254  }
3255};
3256
3257static GTY(()) hash_table <source_location_table_entry_hash>
3258  *source_location_table;
3259static GTY(()) unsigned int source_location_id;
3260
3261/* Fold __builtin_source_location () call.  LOC is the location
3262   of the call.  */
3263
3264tree
3265fold_builtin_source_location (location_t loc)
3266{
3267  if (source_location_impl == NULL_TREE)
3268    {
3269      auto_diagnostic_group d;
3270      source_location_impl = get_source_location_impl_type (loc);
3271      if (source_location_impl == error_mark_node)
3272	inform (loc, "evaluating %qs", "__builtin_source_location");
3273    }
3274  if (source_location_impl == error_mark_node)
3275    return build_zero_cst (const_ptr_type_node);
3276  if (source_location_table == NULL)
3277    source_location_table
3278      = hash_table <source_location_table_entry_hash>::create_ggc (64);
3279  const line_map_ordinary *map;
3280  source_location_table_entry entry;
3281  entry.loc
3282    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3283				&map);
3284  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3285  entry.var = error_mark_node;
3286  source_location_table_entry *entryp
3287    = source_location_table->find_slot (entry, INSERT);
3288  tree var;
3289  if (entryp->var)
3290    var = entryp->var;
3291  else
3292    {
3293      char tmp_name[32];
3294      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3295      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3296			source_location_impl);
3297      TREE_STATIC (var) = 1;
3298      TREE_PUBLIC (var) = 0;
3299      DECL_ARTIFICIAL (var) = 1;
3300      DECL_IGNORED_P (var) = 1;
3301      DECL_EXTERNAL (var) = 0;
3302      DECL_DECLARED_CONSTEXPR_P (var) = 1;
3303      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3304      layout_decl (var, 0);
3305
3306      vec<constructor_elt, va_gc> *v = NULL;
3307      vec_alloc (v, 4);
3308      for (tree field = TYPE_FIELDS (source_location_impl);
3309	   (field = next_initializable_field (field)) != NULL_TREE;
3310	   field = DECL_CHAIN (field))
3311	{
3312	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3313	  tree val = NULL_TREE;
3314	  if (strcmp (n, "_M_file_name") == 0)
3315	    {
3316	      if (const char *fname = LOCATION_FILE (loc))
3317		{
3318		  fname = remap_macro_filename (fname);
3319		  val = build_string_literal (strlen (fname) + 1, fname);
3320		}
3321	      else
3322		val = build_string_literal (1, "");
3323	    }
3324	  else if (strcmp (n, "_M_function_name") == 0)
3325	    {
3326	      const char *name = "";
3327
3328	      if (current_function_decl)
3329		name = cxx_printable_name (current_function_decl, 2);
3330
3331	      val = build_string_literal (strlen (name) + 1, name);
3332	    }
3333	  else if (strcmp (n, "_M_line") == 0)
3334	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3335	  else if (strcmp (n, "_M_column") == 0)
3336	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3337	  else
3338	    gcc_unreachable ();
3339	  CONSTRUCTOR_APPEND_ELT (v, field, val);
3340	}
3341
3342      tree ctor = build_constructor (source_location_impl, v);
3343      TREE_CONSTANT (ctor) = 1;
3344      TREE_STATIC (ctor) = 1;
3345      DECL_INITIAL (var) = ctor;
3346      varpool_node::finalize_decl (var);
3347      *entryp = entry;
3348      entryp->var = var;
3349    }
3350
3351  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3352}
3353
3354#include "gt-cp-cp-gimplify.h"
3355