/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, with gotos to the unique return site.  */
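
/* For illustration only (this sketch is not part of the pass, and the
   temporary and label names below are invented), a function like

     int f (int a)
     {
       if (a > 0)
	 {
	   int b = a + 1;
	   return b;
	 }
       return a;
     }

   arrives here as a GIMPLE_BIND tree with nested scopes and two
   GIMPLE_RETURNs of the same temporary.  After lowering, the binds are
   gone (their variables have been recorded in the function) and both
   returns have been replaced by gotos to a single return site, roughly:

     f (int a)
     {
       if (a > 0) goto <D.1>; else goto <D.2>;
       <D.1>:
       b = a + 1;
       D.1955 = b;
       goto <D.1956>;
       <D.2>:
       D.1955 = a;
       goto <D.1956>;
       <D.1956>:
       return D.1955;
     }

   Compare the -fdump-tree-gimple and -fdump-tree-lower dumps to see
   the actual transformation.  */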

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it most
	 certainly will be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failing to remove it
	     would produce incorrect coverage-analysis results.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if that has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later.  This guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering; they
	 are first emitted when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	/* We delay folding of builtin calls from gimplification to
	   here so the IL is in a consistent state for the diagnostic
	   machinery's job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (gsi_stmt (i)));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failing to remove it
	     would produce incorrect coverage-analysis results.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During CFG creation, an extra per-function (or per-OpenMP-region)
   block with an ABNORMAL_DISPATCHER internal call will be added; it is
   the unique destination of all the abnormal call edges and the unique
   source of all the abnormal edges to the receivers, thus keeping the
   complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

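  /* The call's LHS, if any, is assigned on both arms below.  An SSA name
     may not be assigned more than once, so when the LHS is an SSA name,
     stage the result in a temporary register and copy it into the real
     LHS once at the end.  */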
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
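  /* Emit 'if (res == 0) goto align_label; else goto noalign_label;'
     after the call; on the success path, load the allocated pointer,
     pass it through __builtin_assume_aligned, and store it back.  */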
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}
