/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "varray.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "langhooks-def.h"
#include "tree-flow.h"
#include "timevar.h"
#include "except.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return, with gotos redirecting control to the unique return site.  */
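
/* As an illustrative sketch of points 1 and 3 (not an actual compiler
   dump), a High GIMPLE body such as

     {
       if (a) goto <L0>; else goto <L1>;
       <L0>:
       return;
       <L1>:
       return;
     }

   is lowered so that the enclosing GIMPLE_BIND disappears and the two
   identical returns are funneled through one return site:

     if (a) goto <L0>; else goto <L1>;
     <L0>:
     goto <RET>;
     <L1>:
     goto <RET>;
     <RET>:
     return;  */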

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt)
	     != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Remove the line number from the representative return statement.
	 It now fills in for many such returns.  Failure to remove this
	 will result in incorrect results for coverage analysis.  */
      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};


/* Verify that the types of the arguments to call statement STMT match
   those of the function declaration.  If we cannot verify this or there
   is a mismatch, return false.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  fndecl = gimple_call_fndecl (stmt);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (gimple_call_fn (stmt))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = TREE_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
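
/* For illustration (hypothetical declarations): given 'void f (int *);',
   a call 'f (0)' passes the check above, since the integer constant is
   fold-convertible to 'int *', whereas passing a structure object fails.
   Note that excess arguments are deliberately accepted, because at this
   point they cannot be distinguished from a varargs call.  */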


/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed; if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
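
/* A sketch of the effect, using pragma syntax for brevity (hypothetical
   input): a directive such as

     #pragma omp parallel
       body;

   reaches this point as a GIMPLE_OMP_PARALLEL carrying BODY as its body.
   After lowering, the directive statement is immediately followed by the
   lowered BODY in the enclosing sequence, and its own body is cleared;
   the actual outlining is left to the later OpenMP expansion passes.  */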


/* Lower the statement pointed to by GSI.  DATA is passed through the
   recursion.  We try to track whether statements can fall through and to
   remove unreachable return statements, in order to prevent the EH
   lowering pass from adding useless edges that can cause bogus warnings
   to be issued later.  This guess need not be 100% accurate; simply be
   conservative and reset cannot_fallthru to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
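
/* A hedged example of the flattening performed above (hypothetical
   input): the bind

     {
       int tmp;
       tmp = a + b;
       use (tmp);
     }

   is replaced by the bare sequence

     tmp = a + b;
     use (tmp);

   with TMP recorded in the function's local declarations via record_vars
   and the bind's BLOCK stitched into the block tree of DECL_INITIAL.  */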

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY statement of kind
   GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
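
/* For instance (illustrative only), for a GIMPLE_TRY_CATCH whose try
   body ends in a noreturn call and whose single catch handler is empty,
   the try body cannot fall through but the empty handler can, so the
   whole construct may fall through.  */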


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}
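
/* Illustrative cases (hypothetical trees): a block ending in

     if (c)
       return x;
     else
       return y;

   cannot fall through, since both arms of the COND_EXPR end in
   RETURN_EXPR; a block ending in a plain call 'foo ();' is assumed to
   fall through unless FOO is declared noreturn.  */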


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_CONDs are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	goto found;
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
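
/* Note that returns are matched above by pointer equality of their
   return value, so, for example (hypothetical sequence), two plain
   'return;' statements (both with a NULL retval) share a single label:

     return;   becomes   goto <RET0>;
     ...                 ...
     return;   becomes   goto <RET0>;

   with '<RET0>: return;' emitted once by lower_function_body.  */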

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_zero_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      cfun->local_decls = tree_cons (NULL_TREE, var,
				     cfun->local_decls);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}