/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-common.h"
#include "toplev.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  TREE_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   the statement sequence for the contents of the scope.

   If we saw a break (or continue) in the scope, append a GIMPLE_LABEL
   statement for LABEL to BODY.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    {
      gimple_seq_add_stmt (&body, gimple_build_label (label));
    }

  bc_label[bc] = TREE_CHAIN (label);
  TREE_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_DECL used to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  if (label == NULL_TREE)
    {
      if (bc == bc_break)
        error ("break statement not within loop or switch");
      else
        error ("continue statement not within loop or switch");

      return NULL_TREE;
    }

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */
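/* Informally, the result has this shape (a sketch, not a literal tree
   dump):

     TRY_CATCH_EXPR
       operand 0: BODY
       operand 1: EH_FILTER_EXPR
                    EH_FILTER_TYPES:   ALLOWED
                    EH_FILTER_FAILURE: FAILURE  */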

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
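/* For example (a sketch), the body of a function declared with
   "throw (A)" ends up wrapped so that an exception not matching A reaches
   the EH_FILTER_FAILURE expression built below: a call to
   call_unexpected_node with the current exception pointer.  */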

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
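/* For illustration only (a sketch; the label names are made up), a loop

     while (cond) body;

   is lowered by the code below into roughly

           goto cont;
     top:  body;
     cont: if (cond != 0) goto top; else goto brk;
     brk:  ;

   with break and continue statements in BODY resolved into gotos to the
   brk and cont labels.  */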

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If the condition is zero, don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          gimplify_expr (&to, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimplify_seq_add_stmt (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimplify_seq_add_stmt (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  VEC_INIT_EXPR_INIT (*expr_p), false, 1,
                                  tf_warning_or_error);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p, post_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((rhs_predicate_for (op0)) (op1)
                 && !(TREE_CODE (op1) == CALL_EXPR
                      && CALL_EXPR_RETURN_SLOT_OPT (op1))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and CONSTRUCTORs get
               reduced properly, and we leave the return slot optimization
               alone because it isn't a copy.

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else if (TREE_CODE (op1) == INDIRECT_REF
                     && TREE_THIS_VOLATILE (op1))
              {
                /* Similarly for volatile INDIRECT_REFs on the RHS.  */
                if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
                  *expr_p = op0;
                else
                  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                    TREE_OPERAND (op1, 0), op0);
              }
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

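/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */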
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *)item)->uid;
}

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk; pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing BIND_EXPR that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chain.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          TREE_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

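/* Lower the body of FNDECL from C++ trees to GENERIC: rewrite parms and
   the return value that are passed or returned by invisible reference,
   walk the body with cp_genericize_r, and then run the language-shared
   c_genericize pass.  */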
void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* True if the OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  tree type;

  /* Static data members are predetermined as shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = TREE_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}