/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"


/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
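  /* Illustrative note: the low-order bits of a heap pointer are
     typically zero because of allocator alignment, so shift them away
     to spread hash values; the 4 is a heuristic, not a guarantee.  */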
  return (size_t)*x >> 4;
}


/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                        NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
	 we need to record the call expression, not just the outer
	 modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
	  && (t = get_call_expr_in (t)))
	remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}


/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
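
/* For instance (illustrative only), given

	try { if (p) goto out; } finally { cleanup (); }
	out:;

   the label "out" is recorded with a parent outside the
   TRY_FINALLY_EXPR, so outside_finally_tree below will report that the
   goto escapes, and the second pass must route it through the finally
   block.  */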

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
     bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
         = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by Q->stmt to FINLAB.  Place in
   Q->cont_stmt whatever is needed to finish the return.  If MOD is
   non-null, insert it before the new branch.  RETURN_VALUE_P is a cache
   containing a temporary variable to be used in manipulating the value
   returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	  should return 0, not 1.  Arrange for this to happen by copying
	  the computed return value into a local temporary.  This also
	  allows us to redirect multiple return statements through the
	  same destination block; whether this is a net win or not really
	  depends, I guess, but it does make generation of the switch in
	  lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case MODIFY_EXPR:
	  {
	    tree result = TREE_OPERAND (ret_expr, 0);
	    tree new, old = TREE_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				      TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		    into the return slot now.  Otherwise, we might need to
		    worry about magic return semantics, so we need to use a
		    temporary to hold the value until we're actually ready
		    to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    if (new == result)
	      x = result;
	    else
	      x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
	    q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
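/* Illustrative sketch (not literal output): given

	try { goto elsewhere; } finally { for (;;) wait (); }

   nothing falls out of the finally block, so the redirected gotos and
   returns, as well as the EH label, all funnel into the single copy of
   the finally block appended after the try body.  */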

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
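/* Illustratively, if only the EH edge reaches the finally block, the
   result is just "eh_label: <finally>; resx;" appended after the try
   body; if only the fallthru edge does, the two blocks simply run
   together.  */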

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
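/* Illustrative shape of the result (simplified): with a fallthru exit
   and one escaping goto to label L, we emit roughly

	<try body>
	<copy of finally>  goto fallthru_label;
     L_redirect:
	<copy of finally>  goto L;

   that is, one copy of the finally block per reachable destination.
   The label names here are made up for exposition.  */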

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* We must link the new stmts after running replace_goto_queue so
     that we don't process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
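/* Illustrative shape of the result (simplified):

	<try body, each exit doing "finally_tmp = K; goto finally_label">
     finally_label:
	<finally body>
	switch (finally_tmp)
	  {
	  case 0: goto fallthru_label;
	  case 1: resx;
	  default: goto user_label;
	  }

   The actual index assignments and the required default case are
   computed below; this sketch is exposition only.  */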

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
		        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence in which the
     code is laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, fallthru_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, eh_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		        build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		        build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build3 (CASE_LABEL_EXPR, void_type_node,
		    build_int_cst (NULL_TREE, switch_id), NULL,
		    /* We store the cont_stmt in the
		       CASE_LABEL, so that we can recover it
		       in the loop below.  We don't create
		       the new label while walking the
		       goto_queue because pointers don't
		       offer a stable order.  */
		    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* We must link switch_stmt after running replace_goto_queue so that
     we don't process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
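
/* Illustrative arithmetic for the heuristic above (made-up numbers):
   a finally block estimated at 15 insns with 3 destinations gives
   f_estimate = (15 + 1) * 3 = 48 and sw_estimate = 10 + 2 * 3 = 16,
   so at -O2 we duplicate (48 < 100) while at -Os we emit the switch
   (48 >= 16).  */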

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For a must-not-throw region, any cleanup regions inside it
     can't reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
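/* Illustratively, for

	try { body; } cleanup { stuff; }

   "stuff" runs only on the exception path out of "body"; normal
   control flow never enters it, unlike a true finally block.  */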

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};


/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}

static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i lacks EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}

/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
1819		error ("BB %i can not throw but has EH edges", bb->index);
1820		return true;
1821	      }
1822	   return false;
1823	}
1824      if (!tree_could_throw_p (stmt))
1825	{
1826	  error ("BB %i last statement has incorrectly set region", bb->index);
1827	  return true;
1828	}
1829      is_resx = false;
1830    }
1831
1832  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1833  FOR_EACH_EDGE (e, ei, bb->succs)
1834    {
1835      if ((e->flags & EDGE_EH) && !e->aux)
1836	{
1837	  error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1838	  mark_eh_edge_found_error = true;
1839	  return true;
1840	}
1841      e->aux = NULL;
1842    }
1843  return mark_eh_edge_found_error;
1844}
1845
1846
/* Return true if EXPR can trap, as when dereferencing an invalid
   pointer location or performing floating-point arithmetic.  Cf. the
   RTL version, may_trap_p.  This routine expects only a GIMPLE lhs or
   rhs input.  */
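
/* For example (illustrative, not exhaustive): *P is considered
   trapping unless the dereference is marked TREE_THIS_NOTRAP; an
   integral B / C is considered trapping when C is zero or not
   constant; and a floating-point X + Y may trap when -ftrapping-math
   is in effect.  */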

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
	 reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of a floating point value might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}

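/* Return true if statement T could throw an exception.  With
   -fnon-call-exceptions this includes any trapping load, store, or
   expression; otherwise only calls not marked ECF_NOTHROW can throw.
   Always false when exceptions are disabled.  */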
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

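/* Return true if STMT can throw an exception that is caught within
   the current function.  */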
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

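/* Return true if STMT can throw an exception that propagates out of
   the current function.  */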
bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_region (new_stmt, region_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
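
/* A typical caller (illustrative only) rewrites a statement in place
   and then removes any EH edges that are no longer needed:

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
       tree_purge_dead_eh_edges (bb_for_stmt (new_stmt));  */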

#ifdef ENABLE_CHECKING
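/* Callback for htab_traverse; verify that the statement recorded in
   *SLOT carries a NULL statement annotation.  */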
static int
verify_eh_throw_stmt_node (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;

  gcc_assert (node->stmt->common.ann == NULL);
  return 1;
}

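/* Check the annotation invariant above for every statement in the
   current function's throw statement table, if there is one.  */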
void
verify_eh_throw_table_statements (void)
{
  if (!get_eh_throw_stmt_table (cfun))
    return;
  htab_traverse (get_eh_throw_stmt_table (cfun),
		 verify_eh_throw_stmt_node,
		 NULL);
}

#endif