tree-eh.c revision 1.1.1.2
1/* Exception handling semantics and decomposition for trees.
2   Copyright (C) 2003-2013 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "tree.h"
25#include "flags.h"
26#include "function.h"
27#include "except.h"
28#include "pointer-set.h"
29#include "tree-flow.h"
30#include "tree-inline.h"
31#include "tree-pass.h"
32#include "langhooks.h"
33#include "ggc.h"
34#include "diagnostic-core.h"
35#include "gimple.h"
36#include "target.h"
37#include "cfgloop.h"
38
39/* In some instances a tree and a gimple need to be stored in the same table,
40   i.e. in hash tables.  This union makes that possible.  */
41typedef union {tree *tp; tree t; gimple g;} treemple;
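
/* For illustration (a sketch; the variable names here are made up):
   the same treemple can carry either kind of key, e.g.

	treemple key;
	key.t = label;		(a LABEL_DECL, i.e. a tree)
	key.g = try_stmt;	(a GIMPLE_TRY statement)

   The active member is implied by the caller; nothing in the union
   records which field was last set.  */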
42
43/* Nonzero if we are using EH to handle cleanups.  */
44static int using_eh_for_cleanups_p = 0;
45
46void
47using_eh_for_cleanups (void)
48{
49  using_eh_for_cleanups_p = 1;
50}
51
52/* Misc functions used in this file.  */
53
54/* Remember and look up EH landing pad data for arbitrary statements.
55   Really this means any statement that could_throw_p.  We could
56   stuff this information into the stmt_ann data structure, but:
57
58   (1) We absolutely rely on this information being kept until
59   we get to rtl.  Once we're done with lowering here, if we lose
60   the information there's no way to recover it!
61
62   (2) There are many more statements that *cannot* throw as
63   compared to those that can.  We save space by allocating
64   memory only for those that can throw.  */
65
66/* Add statement T in function IFUN to landing pad NUM.  */
67
68void
69add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
70{
71  struct throw_stmt_node *n;
72  void **slot;
73
74  gcc_assert (num != 0);
75
76  n = ggc_alloc_throw_stmt_node ();
77  n->stmt = t;
78  n->lp_nr = num;
79
80  if (!get_eh_throw_stmt_table (ifun))
81    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
82						    struct_ptr_eq,
83						    ggc_free));
84
85  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
86  gcc_assert (!*slot);
87  *slot = n;
88}
89
90/* Add statement T in the current function (cfun) to EH landing pad NUM.  */
91
92void
93add_stmt_to_eh_lp (gimple t, int num)
94{
95  add_stmt_to_eh_lp_fn (cfun, t, num);
96}
97
98/* Add statement T to the single EH landing pad in REGION.  */
99
100static void
101record_stmt_eh_region (eh_region region, gimple t)
102{
103  if (region == NULL)
104    return;
105  if (region->type == ERT_MUST_NOT_THROW)
106    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
107  else
108    {
109      eh_landing_pad lp = region->landing_pads;
110      if (lp == NULL)
111	lp = gen_eh_landing_pad (region);
112      else
113	gcc_assert (lp->next_lp == NULL);
114      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
115    }
116}
117
118
119/* Remove statement T in function IFUN from its EH landing pad.  */
120
121bool
122remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
123{
124  struct throw_stmt_node dummy;
125  void **slot;
126
127  if (!get_eh_throw_stmt_table (ifun))
128    return false;
129
130  dummy.stmt = t;
131  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
132                        NO_INSERT);
133  if (slot)
134    {
135      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
136      return true;
137    }
138  else
139    return false;
140}
141
142
143/* Remove statement T in the current function (cfun) from its
144   EH landing pad.  */
145
146bool
147remove_stmt_from_eh_lp (gimple t)
148{
149  return remove_stmt_from_eh_lp_fn (cfun, t);
150}
151
152/* Determine if statement T is inside an EH region in function IFUN.
153   Positive numbers indicate a landing pad index; negative numbers
154   indicate a MUST_NOT_THROW region index; zero indicates that the
155   statement is not recorded in the region table.  */
156
157int
158lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
159{
160  struct throw_stmt_node *p, n;
161
162  if (ifun->eh->throw_stmt_table == NULL)
163    return 0;
164
165  n.stmt = t;
166  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
167  return p ? p->lp_nr : 0;
168}
169
170/* Likewise, but always use the current function.  */
171
172int
173lookup_stmt_eh_lp (gimple t)
174{
175  /* We can get called from initialized data when -fnon-call-exceptions
176     is on; prevent crash.  */
177  if (!cfun)
178    return 0;
179  return lookup_stmt_eh_lp_fn (cfun, t);
180}
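
/* To illustrate the conventions above (a sketch with made-up indices):

	add_stmt_to_eh_lp (stmt, 2);	lookup_stmt_eh_lp (stmt) == 2
					(landing pad number 2)
	add_stmt_to_eh_lp (stmt, -5);	lookup_stmt_eh_lp (stmt) == -5
					(MUST_NOT_THROW region number 5)
	never recorded			lookup_stmt_eh_lp (stmt) == 0  */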
181
182/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
183   nodes and LABEL_DECL nodes.  We will use this during the second phase to
184   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
185
186struct finally_tree_node
187{
188  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
189     when deciding whether a GOTO to a certain LABEL_DECL (which is a
190     tree) leaves the TRY block, it's necessary to record a tree in
191     this field.  Thus a treemple is used. */
192  treemple child;
193  gimple parent;
194};
195
196/* Note that this table is *not* marked GTY.  It is short-lived.  */
197static htab_t finally_tree;
198
199static void
200record_in_finally_tree (treemple child, gimple parent)
201{
202  struct finally_tree_node *n;
203  void **slot;
204
205  n = XNEW (struct finally_tree_node);
206  n->child = child;
207  n->parent = parent;
208
209  slot = htab_find_slot (finally_tree, n, INSERT);
210  gcc_assert (!*slot);
211  *slot = n;
212}
213
214static void
215collect_finally_tree (gimple stmt, gimple region);
216
217/* Go through the gimple sequence.  Works with collect_finally_tree to
218   record all GIMPLE_LABEL and GIMPLE_TRY statements. */
219
220static void
221collect_finally_tree_1 (gimple_seq seq, gimple region)
222{
223  gimple_stmt_iterator gsi;
224
225  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
226    collect_finally_tree (gsi_stmt (gsi), region);
227}
228
229static void
230collect_finally_tree (gimple stmt, gimple region)
231{
232  treemple temp;
233
234  switch (gimple_code (stmt))
235    {
236    case GIMPLE_LABEL:
237      temp.t = gimple_label_label (stmt);
238      record_in_finally_tree (temp, region);
239      break;
240
241    case GIMPLE_TRY:
242      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
243        {
244          temp.g = stmt;
245          record_in_finally_tree (temp, region);
246          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
247	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
248        }
249      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
250        {
251          collect_finally_tree_1 (gimple_try_eval (stmt), region);
252          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
253        }
254      break;
255
256    case GIMPLE_CATCH:
257      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
258      break;
259
260    case GIMPLE_EH_FILTER:
261      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
262      break;
263
264    case GIMPLE_EH_ELSE:
265      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
266      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
267      break;
268
269    default:
270      /* A type, a decl, or some kind of statement that we're not
271	 interested in.  Don't walk them.  */
272      break;
273    }
274}
275
276
277/* Use the finally tree to determine if a jump from START to TARGET
278   would leave the try_finally node that START lives in.  */
279
280static bool
281outside_finally_tree (treemple start, gimple target)
282{
283  struct finally_tree_node n, *p;
284
285  do
286    {
287      n.child = start;
288      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
289      if (!p)
290	return true;
291      start.g = p->parent;
292    }
293  while (start.g != target);
294
295  return false;
296}
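
/* For example (schematically):

	try { ... goto L; ... } finally { ... }
	L: ...

   The label L lies outside the try body, so walking up from L through the
   finally tree never reaches this GIMPLE_TRY_FINALLY and
   outside_finally_tree returns true: the goto escapes the construct.  A
   goto to a label inside the try body would return false.  */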
297
298/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
299   nodes into a set of gotos, magic labels, and eh regions.
300   The eh region creation is straightforward, but frobbing all the gotos
301   and such into shape isn't.  */
302
303/* The sequence into which we record all EH stuff.  This will be
304   placed at the end of the function when we're all done.  */
305static gimple_seq eh_seq;
306
307/* Record whether an EH region contains something that can throw,
308   indexed by EH region number.  */
309static bitmap eh_region_may_contain_throw_map;
310
311/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
312   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
313   The idea is to record a gimple statement for everything except for
314   the conditionals, which get their labels recorded. Since labels are
315   of type 'tree', we need this node to store both gimple and tree
316   objects.  REPL_STMT is the sequence used to replace the goto/return
317   statement.  CONT_STMT is used to store the statement that allows
318   the return/goto to jump to the original destination. */
319
320struct goto_queue_node
321{
322  treemple stmt;
323  location_t location;
324  gimple_seq repl_stmt;
325  gimple cont_stmt;
326  int index;
327  /* This is used when index >= 0 to indicate that stmt is a label (as
328     opposed to a goto stmt).  */
329  int is_label;
330};
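
/* For instance (a sketch): for a "return;" escaping the try block,
   INDEX is -1 and IS_LABEL is false; for a "goto L;" leaving it, INDEX is
   the position of L in DEST_ARRAY and IS_LABEL is true (for a GIMPLE_COND
   we queue the then/else label slot rather than the statement itself).
   After redirection an entry looks roughly like

	REPL_STMT:  goto <finally label>;   (possibly preceded by a
					     dispatch assignment)
	CONT_STMT:  return;  or  goto L;

   with CONT_STMT re-emitted after the finally code.  */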
331
332/* State of the world while lowering.  */
333
334struct leh_state
335{
336  /* What's "current" while constructing the eh region tree.  These
337     correspond to variables of the same name in cfun->eh, which we
338     don't have easy access to.  */
339  eh_region cur_region;
340
341  /* What's "current" for the purposes of __builtin_eh_pointer.  For
342     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
343     the associated ALLOWED_EXCEPTIONS, etc.  */
344  eh_region ehp_region;
345
346  /* Processing of TRY_FINALLY requires a bit more state.  This is
347     split out into a separate structure so that we don't have to
348     copy so much when processing other nodes.  */
349  struct leh_tf_state *tf;
350};
351
352struct leh_tf_state
353{
354  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
355     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
356     this so that outside_finally_tree can reliably reference the tree used
357     in the collect_finally_tree data structures.  */
358  gimple try_finally_expr;
359  gimple top_p;
360
361   /* Lowering TOP_P usually expands it into multiple statements, so we
362      need the following field to store them.  */
363  gimple_seq top_p_seq;
364
365  /* The state outside this try_finally node.  */
366  struct leh_state *outer;
367
368  /* The exception region created for it.  */
369  eh_region region;
370
371  /* The goto queue.  */
372  struct goto_queue_node *goto_queue;
373  size_t goto_queue_size;
374  size_t goto_queue_active;
375
376  /* Pointer map to help in searching goto_queue when it is large.  */
377  struct pointer_map_t *goto_queue_map;
378
379  /* The set of unique labels seen as entries in the goto queue.  */
380  vec<tree> dest_array;
381
382  /* A label to be added at the end of the completed transformed
383     sequence.  It will be set if may_fallthru was true *at one time*,
384     though subsequent transformations may have cleared that flag.  */
385  tree fallthru_label;
386
387  /* True if it is possible to fall out the bottom of the try block.
388     Cleared if the fallthru is converted to a goto.  */
389  bool may_fallthru;
390
391  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
392  bool may_return;
393
394  /* True if the finally block can receive an exception edge.
395     Cleared if the exception case is handled by code duplication.  */
396  bool may_throw;
397};
398
399static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
400
401/* Search for STMT in the goto queue.  Return the replacement,
402   or null if the statement isn't in the queue.  */
403
404#define LARGE_GOTO_QUEUE 20
405
406static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
407
408static gimple_seq
409find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
410{
411  unsigned int i;
412  void **slot;
413
414  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
415    {
416      for (i = 0; i < tf->goto_queue_active; i++)
417	if (tf->goto_queue[i].stmt.g == stmt.g)
418	  return tf->goto_queue[i].repl_stmt;
419      return NULL;
420    }
421
422  /* If we have a large number of entries in the goto_queue, create a
423     pointer map and use that for searching.  */
424
425  if (!tf->goto_queue_map)
426    {
427      tf->goto_queue_map = pointer_map_create ();
428      for (i = 0; i < tf->goto_queue_active; i++)
429	{
430	  slot = pointer_map_insert (tf->goto_queue_map,
431                                     tf->goto_queue[i].stmt.g);
432          gcc_assert (*slot == NULL);
433	  *slot = &tf->goto_queue[i];
434	}
435    }
436
437  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
438  if (slot != NULL)
439    return (((struct goto_queue_node *) *slot)->repl_stmt);
440
441  return NULL;
442}
443
444/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
445   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
446   then we can just splat it in, otherwise we add the new stmts immediately
447   after the GIMPLE_COND and redirect.  */
448
449static void
450replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
451				gimple_stmt_iterator *gsi)
452{
453  tree label;
454  gimple_seq new_seq;
455  treemple temp;
456  location_t loc = gimple_location (gsi_stmt (*gsi));
457
458  temp.tp = tp;
459  new_seq = find_goto_replacement (tf, temp);
460  if (!new_seq)
461    return;
462
463  if (gimple_seq_singleton_p (new_seq)
464      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
465    {
466      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
467      return;
468    }
469
470  label = create_artificial_label (loc);
471   /* Set the new label for the GIMPLE_COND.  */
472  *tp = label;
473
474  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
475  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
476}
477
478/* The real work of replace_goto_queue.  Returns with GSI updated to
479   point to the next statement.  */
480
481static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
482
483static void
484replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
485		      gimple_stmt_iterator *gsi)
486{
487  gimple_seq seq;
488  treemple temp;
489  temp.g = NULL;
490
491  switch (gimple_code (stmt))
492    {
493    case GIMPLE_GOTO:
494    case GIMPLE_RETURN:
495      temp.g = stmt;
496      seq = find_goto_replacement (tf, temp);
497      if (seq)
498	{
499	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
500	  gsi_remove (gsi, false);
501	  return;
502	}
503      break;
504
505    case GIMPLE_COND:
506      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
507      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
508      break;
509
510    case GIMPLE_TRY:
511      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
512      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
513      break;
514    case GIMPLE_CATCH:
515      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
516      break;
517    case GIMPLE_EH_FILTER:
518      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
519      break;
520    case GIMPLE_EH_ELSE:
521      replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
522      replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
523      break;
524
525    default:
526      /* These won't have gotos in them.  */
527      break;
528    }
529
530  gsi_next (gsi);
531}
532
533/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */
534
535static void
536replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
537{
538  gimple_stmt_iterator gsi = gsi_start (*seq);
539
540  while (!gsi_end_p (gsi))
541    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
542}
543
544/* Replace all goto queue members.  */
545
546static void
547replace_goto_queue (struct leh_tf_state *tf)
548{
549  if (tf->goto_queue_active == 0)
550    return;
551  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
552  replace_goto_queue_stmt_list (&eh_seq, tf);
553}
554
555/* Add a new record to the goto queue contained in TF. NEW_STMT is the
556   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
557   a gimple return. */
558
559static void
560record_in_goto_queue (struct leh_tf_state *tf,
561                      treemple new_stmt,
562                      int index,
563                      bool is_label,
564		      location_t location)
565{
566  size_t active, size;
567  struct goto_queue_node *q;
568
569  gcc_assert (!tf->goto_queue_map);
570
571  active = tf->goto_queue_active;
572  size = tf->goto_queue_size;
573  if (active >= size)
574    {
575      size = (size ? size * 2 : 32);
576      tf->goto_queue_size = size;
577      tf->goto_queue
578         = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
579    }
580
581  q = &tf->goto_queue[active];
582  tf->goto_queue_active = active + 1;
583
584  memset (q, 0, sizeof (*q));
585  q->stmt = new_stmt;
586  q->index = index;
587  q->location = location;
588  q->is_label = is_label;
589}
590
591/* Record the LABEL label in the goto queue contained in TF.
592   TF is not null.  */
593
594static void
595record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
596			    location_t location)
597{
598  int index;
599  treemple temp, new_stmt;
600
601  if (!label)
602    return;
603
604  /* Computed and non-local gotos do not get processed.  Given
605     their nature we can neither tell whether we've escaped the
606      finally block nor redirect them even if we knew we had.  */
607  if (TREE_CODE (label) != LABEL_DECL)
608    return;
609
610  /* No need to record gotos that don't leave the try block.  */
611  temp.t = label;
612  if (!outside_finally_tree (temp, tf->try_finally_expr))
613    return;
614
615  if (! tf->dest_array.exists ())
616    {
617      tf->dest_array.create (10);
618      tf->dest_array.quick_push (label);
619      index = 0;
620    }
621  else
622    {
623      int n = tf->dest_array.length ();
624      for (index = 0; index < n; ++index)
625        if (tf->dest_array[index] == label)
626          break;
627      if (index == n)
628        tf->dest_array.safe_push (label);
629    }
630
631  /* In the case of a GOTO we want to record the destination label,
632      since with a GIMPLE_COND we have easy access to the then/else
633     labels. */
634  new_stmt = stmt;
635  record_in_goto_queue (tf, new_stmt, index, true, location);
636}
637
638/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
639   node, and if so record that fact in the goto queue associated with that
640   try_finally node.  */
641
642static void
643maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
644{
645  struct leh_tf_state *tf = state->tf;
646  treemple new_stmt;
647
648  if (!tf)
649    return;
650
651  switch (gimple_code (stmt))
652    {
653    case GIMPLE_COND:
654      new_stmt.tp = gimple_op_ptr (stmt, 2);
655      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
656				  EXPR_LOCATION (*new_stmt.tp));
657      new_stmt.tp = gimple_op_ptr (stmt, 3);
658      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
659				  EXPR_LOCATION (*new_stmt.tp));
660      break;
661    case GIMPLE_GOTO:
662      new_stmt.g = stmt;
663      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
664				  gimple_location (stmt));
665      break;
666
667    case GIMPLE_RETURN:
668      tf->may_return = true;
669      new_stmt.g = stmt;
670      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
671      break;
672
673    default:
674      gcc_unreachable ();
675    }
676}
677
678
679#ifdef ENABLE_CHECKING
680/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
681   was in fact structured, and we've not yet done jump threading, none
682   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */
683
684static void
685verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
686{
687  struct leh_tf_state *tf = state->tf;
688  size_t i, n;
689
690  if (!tf)
691    return;
692
693  n = gimple_switch_num_labels (switch_expr);
694
695  for (i = 0; i < n; ++i)
696    {
697      treemple temp;
698      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
699      temp.t = lab;
700      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
701    }
702}
703#else
704#define verify_norecord_switch_expr(state, switch_expr)
705#endif
706
707/* Redirect a GIMPLE_RETURN pointed to by Q to FINLAB.  If MOD is
708   non-null, insert it before the new branch.  */
709
710static void
711do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
712{
713  gimple x;
714
715  /* In the case of a return, the queue node must be a gimple statement.  */
716  gcc_assert (!q->is_label);
717
718  /* Note that the return value may have already been computed, e.g.,
719
720	int x;
721	int foo (void)
722	{
723	  x = 0;
724	  try {
725	    return x;
726	  } finally {
727	    x++;
728	  }
729	}
730
731     should return 0, not 1.  We don't have to do anything to make
732     this happen because the return value has been placed in the
733     RESULT_DECL already.  */
734
735  q->cont_stmt = q->stmt.g;
736
737  if (mod)
738    gimple_seq_add_seq (&q->repl_stmt, mod);
739
740  x = gimple_build_goto (finlab);
741  gimple_set_location (x, q->location);
742  gimple_seq_add_stmt (&q->repl_stmt, x);
743}
744
745/* Similar, but easier, for GIMPLE_GOTO.  */
746
747static void
748do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
749		     struct leh_tf_state *tf)
750{
751  gimple x;
752
753  gcc_assert (q->is_label);
754
755  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
756
757  if (mod)
758    gimple_seq_add_seq (&q->repl_stmt, mod);
759
760  x = gimple_build_goto (finlab);
761  gimple_set_location (x, q->location);
762  gimple_seq_add_stmt (&q->repl_stmt, x);
763}
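
/* For example (schematically), after do_return_redirection an escaping

	return;

   is replaced, via REPL_STMT, by

	<MOD, if any>
	goto FINLAB;

   while CONT_STMT keeps the original return so it can be re-emitted after
   the finally code.  do_goto_redirection is analogous; there CONT_STMT is
   a goto to the recorded destination label.  */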
764
765/* Emit a standard landing pad sequence into SEQ for REGION.  */
766
767static void
768emit_post_landing_pad (gimple_seq *seq, eh_region region)
769{
770  eh_landing_pad lp = region->landing_pads;
771  gimple x;
772
773  if (lp == NULL)
774    lp = gen_eh_landing_pad (region);
775
776  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
777  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
778
779  x = gimple_build_label (lp->post_landing_pad);
780  gimple_seq_add_stmt (seq, x);
781}
782
783/* Emit a RESX statement into SEQ for REGION.  */
784
785static void
786emit_resx (gimple_seq *seq, eh_region region)
787{
788  gimple x = gimple_build_resx (region->index);
789  gimple_seq_add_stmt (seq, x);
790  if (region->outer)
791    record_stmt_eh_region (region->outer, x);
792}
793
794/* Emit an EH_DISPATCH statement into SEQ for REGION.  */
795
796static void
797emit_eh_dispatch (gimple_seq *seq, eh_region region)
798{
799  gimple x = gimple_build_eh_dispatch (region->index);
800  gimple_seq_add_stmt (seq, x);
801}
802
803/* Note that the current EH region may contain a throw, or a
804   call to a function which itself may contain a throw.  */
805
806static void
807note_eh_region_may_contain_throw (eh_region region)
808{
809  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
810    {
811      if (region->type == ERT_MUST_NOT_THROW)
812	break;
813      region = region->outer;
814      if (region == NULL)
815	break;
816    }
817}
818
819/* Check if REGION has been marked as containing a throw.  If REGION is
820   NULL, this predicate is false.  */
821
822static inline bool
823eh_region_may_contain_throw (eh_region r)
824{
825  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
826}
827
828/* We want to transform
829	try { body; } catch { stuff; }
830   to
831	normal_sequence:
832	  body;
833	  over:
834	eh_sequence:
835	  landing_pad:
836	  stuff;
837	  goto over;
838
839   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
840   should be placed before the second operand, or NULL.  OVER is
841   an existing label that should be put at the exit, or NULL.  */
842
843static gimple_seq
844frob_into_branch_around (gimple tp, eh_region region, tree over)
845{
846  gimple x;
847  gimple_seq cleanup, result;
848  location_t loc = gimple_location (tp);
849
850  cleanup = gimple_try_cleanup (tp);
851  result = gimple_try_eval (tp);
852
853  if (region)
854    emit_post_landing_pad (&eh_seq, region);
855
856  if (gimple_seq_may_fallthru (cleanup))
857    {
858      if (!over)
859	over = create_artificial_label (loc);
860      x = gimple_build_goto (over);
861      gimple_set_location (x, loc);
862      gimple_seq_add_stmt (&cleanup, x);
863    }
864  gimple_seq_add_seq (&eh_seq, cleanup);
865
866  if (over)
867    {
868      x = gimple_build_label (over);
869      gimple_seq_add_stmt (&result, x);
870    }
871  return result;
872}
873
874/* A subroutine of lower_try_finally.  Duplicate the sequence SEQ.
875   Make sure to record all new labels found.  */
876
877static gimple_seq
878lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
879			     location_t loc)
880{
881  gimple region = NULL;
882  gimple_seq new_seq;
883  gimple_stmt_iterator gsi;
884
885  new_seq = copy_gimple_seq_and_replace_locals (seq);
886
887  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
888    {
889      gimple stmt = gsi_stmt (gsi);
890      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
891	{
892	  tree block = gimple_block (stmt);
893	  gimple_set_location (stmt, loc);
894	  gimple_set_block (stmt, block);
895	}
896    }
897
898  if (outer_state->tf)
899    region = outer_state->tf->try_finally_expr;
900  collect_finally_tree_1 (new_seq, region);
901
902  return new_seq;
903}
904
905/* A subroutine of lower_try_finally.  Create a fallthru label for
906   the given try_finally state.  The only tricky bit here is that
907   we have to make sure to record the label in our outer context.  */
908
909static tree
910lower_try_finally_fallthru_label (struct leh_tf_state *tf)
911{
912  tree label = tf->fallthru_label;
913  treemple temp;
914
915  if (!label)
916    {
917      label = create_artificial_label (gimple_location (tf->try_finally_expr));
918      tf->fallthru_label = label;
919      if (tf->outer->tf)
920        {
921          temp.t = label;
922          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
923        }
924    }
925  return label;
926}
927
928/* A subroutine of lower_try_finally.  If FINALLY consists of a
929   GIMPLE_EH_ELSE node, return it.  */
930
931static inline gimple
932get_eh_else (gimple_seq finally)
933{
934  gimple x = gimple_seq_first_stmt (finally);
935  if (gimple_code (x) == GIMPLE_EH_ELSE)
936    {
937      gcc_assert (gimple_seq_singleton_p (finally));
938      return x;
939    }
940  return NULL;
941}
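
/* When present, the GIMPLE_EH_ELSE is the sole statement of the cleanup
   sequence and behaves roughly like

	try { body; }
	finally {
	  if (<leaving normally>) { n_body; } else { e_body; }
	}

   where the if/else is only notation: N_BODY runs on fallthru/goto/return
   exits and E_BODY runs on the exception path.  */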
942
943/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
944   langhook returns non-null, then the language requires that the exception
945   path out of a try_finally be treated specially.  To wit: the code within
946   the finally block may not itself throw an exception.  We have two choices
947   here.  First, we can duplicate the finally block and wrap it in a
948   must_not_throw region.  Second, we can generate code like
949
950	try {
951	  finally_block;
952	} catch {
953	  if (fintmp == eh_edge)
954	    protect_cleanup_actions;
955	}
956
957   where "fintmp" is the temporary used in the switch statement generation
958   alternative considered below.  For the nonce, we always choose the first
959   option.
960
961   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
962
963static void
964honor_protect_cleanup_actions (struct leh_state *outer_state,
965			       struct leh_state *this_state,
966			       struct leh_tf_state *tf)
967{
968  tree protect_cleanup_actions;
969  gimple_stmt_iterator gsi;
970  bool finally_may_fallthru;
971  gimple_seq finally;
972  gimple x, eh_else;
973
974  /* First check for nothing to do.  */
975  if (lang_hooks.eh_protect_cleanup_actions == NULL)
976    return;
977  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
978  if (protect_cleanup_actions == NULL)
979    return;
980
981  finally = gimple_try_cleanup (tf->top_p);
982  eh_else = get_eh_else (finally);
983
984  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
985     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
986  if (eh_else)
987    {
988      finally = gimple_eh_else_e_body (eh_else);
989      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
990    }
991  else if (this_state)
992    finally = lower_try_finally_dup_block (finally, outer_state,
993	gimple_location (tf->try_finally_expr));
994  finally_may_fallthru = gimple_seq_may_fallthru (finally);
995
996  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
997     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
998     to be in an enclosing scope, but needs to be implemented at this level
999     to avoid a nesting violation (see wrap_temporary_cleanups in
1000     cp/decl.c).  Since it's logically at an outer level, we should call
1001     terminate before we get to it, so strip it away before adding the
1002     MUST_NOT_THROW filter.  */
1003  gsi = gsi_start (finally);
1004  x = gsi_stmt (gsi);
1005  if (gimple_code (x) == GIMPLE_TRY
1006      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1007      && gimple_try_catch_is_cleanup (x))
1008    {
1009      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1010      gsi_remove (&gsi, false);
1011    }
1012
1013  /* Wrap the block with protect_cleanup_actions as the action.  */
1014  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1015  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1016			GIMPLE_TRY_CATCH);
1017  finally = lower_eh_must_not_throw (outer_state, x);
1018
1019  /* Drop all of this into the exception sequence.  */
1020  emit_post_landing_pad (&eh_seq, tf->region);
1021  gimple_seq_add_seq (&eh_seq, finally);
1022  if (finally_may_fallthru)
1023    emit_resx (&eh_seq, tf->region);
1024
1025  /* Having now been handled, EH isn't to be considered with
1026     the rest of the outgoing edges.  */
1027  tf->may_throw = false;
1028}
1029
1030/* A subroutine of lower_try_finally.  We have determined that there is
1031   no fallthru edge out of the finally block.  This means that there is
1032   no outgoing edge corresponding to any incoming edge.  Restructure the
1033   try_finally node for this special case.  */
1034
1035static void
1036lower_try_finally_nofallthru (struct leh_state *state,
1037			      struct leh_tf_state *tf)
1038{
1039  tree lab;
1040  gimple x, eh_else;
1041  gimple_seq finally;
1042  struct goto_queue_node *q, *qe;
1043
1044  lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1045
1046  /* We expect that tf->top_p is a GIMPLE_TRY. */
1047  finally = gimple_try_cleanup (tf->top_p);
1048  tf->top_p_seq = gimple_try_eval (tf->top_p);
1049
1050  x = gimple_build_label (lab);
1051  gimple_seq_add_stmt (&tf->top_p_seq, x);
1052
1053  q = tf->goto_queue;
1054  qe = q + tf->goto_queue_active;
1055  for (; q < qe; ++q)
1056    if (q->index < 0)
1057      do_return_redirection (q, lab, NULL);
1058    else
1059      do_goto_redirection (q, lab, NULL, tf);
1060
1061  replace_goto_queue (tf);
1062
1063  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
1064  eh_else = get_eh_else (finally);
1065  if (eh_else)
1066    {
1067      finally = gimple_eh_else_n_body (eh_else);
1068      lower_eh_constructs_1 (state, &finally);
1069      gimple_seq_add_seq (&tf->top_p_seq, finally);
1070
1071      if (tf->may_throw)
1072	{
1073	  finally = gimple_eh_else_e_body (eh_else);
1074	  lower_eh_constructs_1 (state, &finally);
1075
1076	  emit_post_landing_pad (&eh_seq, tf->region);
1077	  gimple_seq_add_seq (&eh_seq, finally);
1078	}
1079    }
1080  else
1081    {
1082      lower_eh_constructs_1 (state, &finally);
1083      gimple_seq_add_seq (&tf->top_p_seq, finally);
1084
1085      if (tf->may_throw)
1086	{
1087	  emit_post_landing_pad (&eh_seq, tf->region);
1088
1089	  x = gimple_build_goto (lab);
1090	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
1091	  gimple_seq_add_stmt (&eh_seq, x);
1092	}
1093    }
1094}
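
/* Schematically (ignoring GIMPLE_EH_ELSE), the result of the
   transformation above is

	<try body, with escaping gotos/returns redirected to lab>
	lab:
	<finally; it does not fall through>

   and, if the region can throw, eh_seq additionally receives

	<post landing pad>
	goto lab;  */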
1095
1096/* A subroutine of lower_try_finally.  We have determined that there is
1097   exactly one destination of the finally block.  Restructure the
1098   try_finally node for this special case.  */
1099
1100static void
1101lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1102{
1103  struct goto_queue_node *q, *qe;
1104  gimple x;
1105  gimple_seq finally;
1106  gimple_stmt_iterator gsi;
1107  tree finally_label;
1108  location_t loc = gimple_location (tf->try_finally_expr);
1109
1110  finally = gimple_try_cleanup (tf->top_p);
1111  tf->top_p_seq = gimple_try_eval (tf->top_p);
1112
1113  /* Since there's only one destination, and the destination edge can only
1114     either be EH or non-EH, that implies that all of our incoming edges
1115     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
1116  x = get_eh_else (finally);
1117  if (x)
1118    {
1119      if (tf->may_throw)
1120	finally = gimple_eh_else_e_body (x);
1121      else
1122	finally = gimple_eh_else_n_body (x);
1123    }
1124
1125  lower_eh_constructs_1 (state, &finally);
1126
1127  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1128    {
1129      gimple stmt = gsi_stmt (gsi);
1130      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1131	{
1132	  tree block = gimple_block (stmt);
1133	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1134	  gimple_set_block (stmt, block);
1135	}
1136    }
1137
1138  if (tf->may_throw)
1139    {
1140      /* Only reachable via the exception edge.  Add the given label to
1141         the head of the FINALLY block.  Append a RESX at the end.  */
1142      emit_post_landing_pad (&eh_seq, tf->region);
1143      gimple_seq_add_seq (&eh_seq, finally);
1144      emit_resx (&eh_seq, tf->region);
1145      return;
1146    }
1147
1148  if (tf->may_fallthru)
1149    {
1150      /* Only reachable via the fallthru edge.  Do nothing but let
1151	 the two blocks run together; we'll fall out the bottom.  */
1152      gimple_seq_add_seq (&tf->top_p_seq, finally);
1153      return;
1154    }
1155
1156  finally_label = create_artificial_label (loc);
1157  x = gimple_build_label (finally_label);
1158  gimple_seq_add_stmt (&tf->top_p_seq, x);
1159
1160  gimple_seq_add_seq (&tf->top_p_seq, finally);
1161
1162  q = tf->goto_queue;
1163  qe = q + tf->goto_queue_active;
1164
1165  if (tf->may_return)
1166    {
1167      /* Reachable by return expressions only.  Redirect them.  */
1168      for (; q < qe; ++q)
1169	do_return_redirection (q, finally_label, NULL);
1170      replace_goto_queue (tf);
1171    }
1172  else
1173    {
1174      /* Reachable by goto expressions only.  Redirect them.  */
1175      for (; q < qe; ++q)
1176	do_goto_redirection (q, finally_label, NULL, tf);
1177      replace_goto_queue (tf);
1178
1179      if (tf->dest_array[0] == tf->fallthru_label)
1180	{
1181	  /* Reachable by goto to fallthru label only.  Redirect it
1182	     to the new label (already created, sadly), and do not
1183	     emit the final branch out, or the fallthru label.  */
1184	  tf->fallthru_label = NULL;
1185	  return;
1186	}
1187    }
1188
1189  /* Place the original return/goto to the original destination
1190     immediately after the finally block. */
1191  x = tf->goto_queue[0].cont_stmt;
1192  gimple_seq_add_stmt (&tf->top_p_seq, x);
1193  maybe_record_in_goto_queue (state, x);
1194}
1195
1196/* A subroutine of lower_try_finally.  There are multiple edges incoming
1197   and outgoing from the finally block.  Implement this by duplicating the
1198   finally block for every destination.  */
1199
1200static void
1201lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1202{
1203  gimple_seq finally;
1204  gimple_seq new_stmt;
1205  gimple_seq seq;
1206  gimple x, eh_else;
1207  tree tmp;
1208  location_t tf_loc = gimple_location (tf->try_finally_expr);
1209
1210  finally = gimple_try_cleanup (tf->top_p);
1211
1212  /* Notice EH_ELSE, and simplify some of the remaining code
1213     by considering FINALLY to be the normal return path only.  */
1214  eh_else = get_eh_else (finally);
1215  if (eh_else)
1216    finally = gimple_eh_else_n_body (eh_else);
1217
1218  tf->top_p_seq = gimple_try_eval (tf->top_p);
1219  new_stmt = NULL;
1220
1221  if (tf->may_fallthru)
1222    {
1223      seq = lower_try_finally_dup_block (finally, state, tf_loc);
1224      lower_eh_constructs_1 (state, &seq);
1225      gimple_seq_add_seq (&new_stmt, seq);
1226
1227      tmp = lower_try_finally_fallthru_label (tf);
1228      x = gimple_build_goto (tmp);
1229      gimple_set_location (x, tf_loc);
1230      gimple_seq_add_stmt (&new_stmt, x);
1231    }
1232
1233  if (tf->may_throw)
1234    {
1235      /* We don't need to copy the EH path of EH_ELSE,
1236	 since it is only emitted once.  */
1237      if (eh_else)
1238	seq = gimple_eh_else_e_body (eh_else);
1239      else
1240	seq = lower_try_finally_dup_block (finally, state, tf_loc);
1241      lower_eh_constructs_1 (state, &seq);
1242
1243      emit_post_landing_pad (&eh_seq, tf->region);
1244      gimple_seq_add_seq (&eh_seq, seq);
1245      emit_resx (&eh_seq, tf->region);
1246    }
1247
1248  if (tf->goto_queue)
1249    {
1250      struct goto_queue_node *q, *qe;
1251      int return_index, index;
1252      struct labels_s
1253      {
1254	struct goto_queue_node *q;
1255	tree label;
1256      } *labels;
1257
1258      return_index = tf->dest_array.length ();
1259      labels = XCNEWVEC (struct labels_s, return_index + 1);
1260
1261      q = tf->goto_queue;
1262      qe = q + tf->goto_queue_active;
1263      for (; q < qe; q++)
1264	{
1265	  index = q->index < 0 ? return_index : q->index;
1266
1267	  if (!labels[index].q)
1268	    labels[index].q = q;
1269	}
1270
1271      for (index = 0; index < return_index + 1; index++)
1272	{
1273	  tree lab;
1274
1275	  q = labels[index].q;
1276	  if (! q)
1277	    continue;
1278
1279	  lab = labels[index].label
1280	    = create_artificial_label (tf_loc);
1281
1282	  if (index == return_index)
1283	    do_return_redirection (q, lab, NULL);
1284	  else
1285	    do_goto_redirection (q, lab, NULL, tf);
1286
1287	  x = gimple_build_label (lab);
1288          gimple_seq_add_stmt (&new_stmt, x);
1289
1290	  seq = lower_try_finally_dup_block (finally, state, q->location);
1291	  lower_eh_constructs_1 (state, &seq);
1292          gimple_seq_add_seq (&new_stmt, seq);
1293
1294          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1295	  maybe_record_in_goto_queue (state, q->cont_stmt);
1296	}
1297
1298      for (q = tf->goto_queue; q < qe; q++)
1299	{
1300	  tree lab;
1301
1302	  index = q->index < 0 ? return_index : q->index;
1303
1304	  if (labels[index].q == q)
1305	    continue;
1306
1307	  lab = labels[index].label;
1308
1309	  if (index == return_index)
1310	    do_return_redirection (q, lab, NULL);
1311	  else
1312	    do_goto_redirection (q, lab, NULL, tf);
1313	}
1314
1315      replace_goto_queue (tf);
1316      free (labels);
1317    }
1318
1319  /* Link the new stmts only after running replace_goto_queue, so that
1320     we do not process the same goto stmts twice.  */
1321  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1322}
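
/* Schematically, the copy strategy turns

	try { body; } finally { fin; }

   with a fallthru exit and one escaping "goto L;" into roughly

	<body, with the goto redirected to lab_L>
	<copy of fin>
	goto <fallthru label>;
	lab_L:
	<copy of fin>
	goto L;

   plus, if the region can throw, one more copy of FIN in eh_seq followed
   by a RESX.  */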
1323
1324/* A subroutine of lower_try_finally.  There are multiple edges incoming
1325   and outgoing from the finally block.  Implement this by instrumenting
1326   each incoming edge and creating a switch statement at the end of the
1327   finally block that branches to the appropriate destination.  */
1328
1329static void
1330lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1331{
1332  struct goto_queue_node *q, *qe;
1333  tree finally_tmp, finally_label;
1334  int return_index, eh_index, fallthru_index;
1335  int nlabels, ndests, j, last_case_index;
1336  tree last_case;
1337  vec<tree> case_label_vec;
1338  gimple_seq switch_body = NULL;
1339  gimple x, eh_else;
1340  tree tmp;
1341  gimple switch_stmt;
1342  gimple_seq finally;
1343  struct pointer_map_t *cont_map = NULL;
1344  /* The location of the TRY_FINALLY stmt.  */
1345  location_t tf_loc = gimple_location (tf->try_finally_expr);
1346  /* The location of the finally block.  */
1347  location_t finally_loc;
1348
1349  finally = gimple_try_cleanup (tf->top_p);
1350  eh_else = get_eh_else (finally);
1351
1352  /* Mash the TRY block to the head of the chain.  */
1353  tf->top_p_seq = gimple_try_eval (tf->top_p);
1354
1355  /* The location of the finally block is either that of its last stmt,
1356     or of the TRY_FINALLY itself.  */
1357  x = gimple_seq_last_stmt (finally);
1358  finally_loc = x ? gimple_location (x) : tf_loc;
1359
1360  /* Prepare for switch statement generation.  */
1361  nlabels = tf->dest_array.length ();
1362  return_index = nlabels;
1363  eh_index = return_index + tf->may_return;
1364  fallthru_index = eh_index + (tf->may_throw && !eh_else);
1365  ndests = fallthru_index + tf->may_fallthru;
1366
1367  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1368  finally_label = create_artificial_label (finally_loc);
1369
1370  /* We use vec::quick_push on case_label_vec throughout this function,
1371     since we know the size in advance and allocate precisely as much
1372     space as needed.  */
1373  case_label_vec.create (ndests);
1374  last_case = NULL;
1375  last_case_index = 0;
1376
1377  /* Begin inserting code for getting to the finally block.  Things
1378     are done in this order to correspond to the sequence in which the
1379     code is laid out.  */
1380
1381  if (tf->may_fallthru)
1382    {
1383      x = gimple_build_assign (finally_tmp,
1384			       build_int_cst (integer_type_node,
1385					      fallthru_index));
1386      gimple_seq_add_stmt (&tf->top_p_seq, x);
1387
1388      tmp = build_int_cst (integer_type_node, fallthru_index);
1389      last_case = build_case_label (tmp, NULL,
1390				    create_artificial_label (tf_loc));
1391      case_label_vec.quick_push (last_case);
1392      last_case_index++;
1393
1394      x = gimple_build_label (CASE_LABEL (last_case));
1395      gimple_seq_add_stmt (&switch_body, x);
1396
1397      tmp = lower_try_finally_fallthru_label (tf);
1398      x = gimple_build_goto (tmp);
1399      gimple_set_location (x, tf_loc);
1400      gimple_seq_add_stmt (&switch_body, x);
1401    }
1402
1403  /* For EH_ELSE, emit the exception path (plus resx) now, then
1404     subsequently we need only consider the normal path.  */
1405  if (eh_else)
1406    {
1407      if (tf->may_throw)
1408	{
1409	  finally = gimple_eh_else_e_body (eh_else);
1410	  lower_eh_constructs_1 (state, &finally);
1411
1412	  emit_post_landing_pad (&eh_seq, tf->region);
1413	  gimple_seq_add_seq (&eh_seq, finally);
1414	  emit_resx (&eh_seq, tf->region);
1415	}
1416
1417      finally = gimple_eh_else_n_body (eh_else);
1418    }
1419  else if (tf->may_throw)
1420    {
1421      emit_post_landing_pad (&eh_seq, tf->region);
1422
1423      x = gimple_build_assign (finally_tmp,
1424			       build_int_cst (integer_type_node, eh_index));
1425      gimple_seq_add_stmt (&eh_seq, x);
1426
1427      x = gimple_build_goto (finally_label);
1428      gimple_set_location (x, tf_loc);
1429      gimple_seq_add_stmt (&eh_seq, x);
1430
1431      tmp = build_int_cst (integer_type_node, eh_index);
1432      last_case = build_case_label (tmp, NULL,
1433				    create_artificial_label (tf_loc));
1434      case_label_vec.quick_push (last_case);
1435      last_case_index++;
1436
1437      x = gimple_build_label (CASE_LABEL (last_case));
1438      gimple_seq_add_stmt (&eh_seq, x);
1439      emit_resx (&eh_seq, tf->region);
1440    }
1441
1442  x = gimple_build_label (finally_label);
1443  gimple_seq_add_stmt (&tf->top_p_seq, x);
1444
1445  lower_eh_constructs_1 (state, &finally);
1446  gimple_seq_add_seq (&tf->top_p_seq, finally);
1447
1448  /* Redirect each incoming goto edge.  */
1449  q = tf->goto_queue;
1450  qe = q + tf->goto_queue_active;
1451  j = last_case_index + tf->may_return;
1452  /* Prepare the assignments to finally_tmp that are executed upon
1453     entry through a particular edge.  */
1454  for (; q < qe; ++q)
1455    {
1456      gimple_seq mod = NULL;
1457      int switch_id;
1458      unsigned int case_index;
1459
1460      if (q->index < 0)
1461	{
1462	  x = gimple_build_assign (finally_tmp,
1463				   build_int_cst (integer_type_node,
1464						  return_index));
1465	  gimple_seq_add_stmt (&mod, x);
1466	  do_return_redirection (q, finally_label, mod);
1467	  switch_id = return_index;
1468	}
1469      else
1470	{
1471	  x = gimple_build_assign (finally_tmp,
1472				   build_int_cst (integer_type_node, q->index));
1473	  gimple_seq_add_stmt (&mod, x);
1474	  do_goto_redirection (q, finally_label, mod, tf);
1475	  switch_id = q->index;
1476	}
1477
1478      case_index = j + q->index;
1479      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
1480        {
1481          tree case_lab;
1482          void **slot;
1483	  tmp = build_int_cst (integer_type_node, switch_id);
1484          case_lab = build_case_label (tmp, NULL,
1485				       create_artificial_label (tf_loc));
1486          /* We store the cont_stmt in the pointer map, so that we can recover
1487             it in the loop below.  */
1488          if (!cont_map)
1489            cont_map = pointer_map_create ();
1490          slot = pointer_map_insert (cont_map, case_lab);
1491          *slot = q->cont_stmt;
1492          case_label_vec.quick_push (case_lab);
1493        }
1494    }
1495  for (j = last_case_index; j < last_case_index + nlabels; j++)
1496    {
1497      gimple cont_stmt;
1498      void **slot;
1499
1500      last_case = case_label_vec[j];
1501
1502      gcc_assert (last_case);
1503      gcc_assert (cont_map);
1504
1505      slot = pointer_map_contains (cont_map, last_case);
1506      gcc_assert (slot);
1507      cont_stmt = *(gimple *) slot;
1508
1509      x = gimple_build_label (CASE_LABEL (last_case));
1510      gimple_seq_add_stmt (&switch_body, x);
1511      gimple_seq_add_stmt (&switch_body, cont_stmt);
1512      maybe_record_in_goto_queue (state, cont_stmt);
1513    }
1514  if (cont_map)
1515    pointer_map_destroy (cont_map);
1516
1517  replace_goto_queue (tf);
1518
1519  /* Make sure that the last case is the default label, as one is required.
1520     Then sort the labels, which is also required in GIMPLE.  */
1521  CASE_LOW (last_case) = NULL;
1522  sort_case_labels (case_label_vec);
1523
1524  /* Build the switch statement, setting last_case to be the default
1525     label.  */
1526  switch_stmt = gimple_build_switch (finally_tmp, last_case,
1527				     case_label_vec);
1528  gimple_set_location (switch_stmt, finally_loc);
1529
1530  /* Link SWITCH_STMT only after running replace_goto_queue, so that
1531     we do not process the same goto stmts twice.  */
1532  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1533  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1534}
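
/* Schematically, the switch strategy rewrites

	try { body; } finally { fin; }

   into roughly

	<body, with each exit replaced by
	     finally_tmp = <its case index>; goto finally_label;>
	finally_label:
	<fin>
	switch (finally_tmp)
	  {
	  case <fallthru index>:  goto <fallthru label>;
	  case <index of L>:      goto L;
	  ...
	  }

   with one case per destination (the exception case, when needed, lands on
   a RESX emitted into eh_seq), so FIN is emitted exactly once no matter
   how many edges reach it.  */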
1535
1536/* Decide whether or not we are going to duplicate the finally block.
1537   There are several considerations.
1538
1539   First, if this is Java, then the finally block contains code
1540   written by the user.  It has line numbers associated with it,
1541   so duplicating the block means it's difficult to set a breakpoint.
1542   Since controlling code generation via -g is verboten, we simply
1543   never duplicate code without optimization.
1544
1545   Second, we'd like to prevent egregious code growth.  One way to
1546   do this is to estimate the size of the finally block, multiply
1547   that by the number of copies we'd need to make, and compare against
1548   the estimate of the size of the switch machinery we'd have to add.  */
1549
1550static bool
1551decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1552{
1553  int f_estimate, sw_estimate;
1554  gimple eh_else;
1555
1556  /* If there's an EH_ELSE involved, the exception path is separate
1557     and really doesn't come into play for this computation.  */
1558  eh_else = get_eh_else (finally);
1559  if (eh_else)
1560    {
1561      ndests -= may_throw;
1562      finally = gimple_eh_else_n_body (eh_else);
1563    }
1564
1565  if (!optimize)
1566    {
1567      gimple_stmt_iterator gsi;
1568
1569      if (ndests == 1)
1570        return true;
1571
1572      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1573	{
1574	  gimple stmt = gsi_stmt (gsi);
1575	  if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
1576	    return false;
1577	}
1578      return true;
1579    }
1580
1581  /* Finally estimate N times, plus N gotos.  */
1582  f_estimate = count_insns_seq (finally, &eni_size_weights);
1583  f_estimate = (f_estimate + 1) * ndests;
1584
1585  /* Switch statement (cost 10), N variable assignments, N gotos.  */
1586  sw_estimate = 10 + 2 * ndests;
1587
1588  /* Optimize for size clearly wants our best guess.  */
1589  if (optimize_function_for_size_p (cfun))
1590    return f_estimate < sw_estimate;
1591
1592  /* ??? These numbers are completely made up so far.  */
1593  if (optimize > 1)
1594    return f_estimate < 100 || f_estimate < sw_estimate * 2;
1595  else
1596    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1597}
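
/* A worked example of the heuristic, with made-up numbers: a finally
   block estimated at 9 size units with 3 destinations gives

	f_estimate  = (9 + 1) * 3 = 30
	sw_estimate = 10 + 2 * 3  = 16

   so when optimizing for size we use the switch (30 >= 16), while at -O2
   we still copy because 30 < 100.  */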
1598
1599/* REG is the enclosing region for a possible cleanup region, or the region
1600   itself.  Returns TRUE if such a region would be unreachable.
1601
1602   Cleanup regions within a must-not-throw region aren't actually reachable
1603   even if there are throwing stmts within them, because the personality
1604   routine will call terminate before unwinding.  */
1605
1606static bool
1607cleanup_is_dead_in (eh_region reg)
1608{
1609  while (reg && reg->type == ERT_CLEANUP)
1610    reg = reg->outer;
1611  return (reg && reg->type == ERT_MUST_NOT_THROW);
1612}
1613
1614/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
1615   to a sequence of labels and blocks, plus the exception region trees
1616   that record all the magic.  This is complicated by the need to
1617   arrange for the FINALLY block to be executed on all exits.  */
1618
1619static gimple_seq
1620lower_try_finally (struct leh_state *state, gimple tp)
1621{
1622  struct leh_tf_state this_tf;
1623  struct leh_state this_state;
1624  int ndests;
1625  gimple_seq old_eh_seq;
1626
1627  /* Process the try block.  */
1628
1629  memset (&this_tf, 0, sizeof (this_tf));
1630  this_tf.try_finally_expr = tp;
1631  this_tf.top_p = tp;
1632  this_tf.outer = state;
1633  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
1634    {
1635      this_tf.region = gen_eh_region_cleanup (state->cur_region);
1636      this_state.cur_region = this_tf.region;
1637    }
1638  else
1639    {
1640      this_tf.region = NULL;
1641      this_state.cur_region = state->cur_region;
1642    }
1643
1644  this_state.ehp_region = state->ehp_region;
1645  this_state.tf = &this_tf;
1646
1647  old_eh_seq = eh_seq;
1648  eh_seq = NULL;
1649
1650  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1651
1652  /* Determine if the try block is escaped through the bottom.  */
1653  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1654
1655  /* Determine if any exceptions are possible within the try block.  */
1656  if (this_tf.region)
1657    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1658  if (this_tf.may_throw)
1659    honor_protect_cleanup_actions (state, &this_state, &this_tf);
1660
1661  /* Determine how many edges (still) reach the finally block.  Or rather,
1662     how many destinations are reached by the finally block.  Use this to
1663     determine how we process the finally block itself.  */
1664
1665  ndests = this_tf.dest_array.length ();
1666  ndests += this_tf.may_fallthru;
1667  ndests += this_tf.may_return;
1668  ndests += this_tf.may_throw;
1669
1670  /* If the FINALLY block is not reachable, dike it out.  */
1671  if (ndests == 0)
1672    {
1673      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1674      gimple_try_set_cleanup (tp, NULL);
1675    }
1676  /* If the finally block doesn't fall through, then any destination
1677     we might try to impose there isn't reached either.  There may be
1678     some minor amount of cleanup and redirection still needed.  */
1679  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1680    lower_try_finally_nofallthru (state, &this_tf);
1681
1682  /* We can easily special-case redirection to a single destination.  */
1683  else if (ndests == 1)
1684    lower_try_finally_onedest (state, &this_tf);
1685  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1686				    gimple_try_cleanup (tp)))
1687    lower_try_finally_copy (state, &this_tf);
1688  else
1689    lower_try_finally_switch (state, &this_tf);
1690
1691  /* If someone requested we add a label at the end of the transformed
1692     block, do so.  */
1693  if (this_tf.fallthru_label)
1694    {
1695      /* This must be reached only if ndests == 0. */
1696      gimple x = gimple_build_label (this_tf.fallthru_label);
1697      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1698    }
1699
1700  this_tf.dest_array.release ();
1701  free (this_tf.goto_queue);
1702  if (this_tf.goto_queue_map)
1703    pointer_map_destroy (this_tf.goto_queue_map);
1704
1705  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1706     If there was no old eh_seq, then the append is trivially already done.  */
1707  if (old_eh_seq)
1708    {
1709      if (eh_seq == NULL)
1710	eh_seq = old_eh_seq;
1711      else
1712	{
1713	  gimple_seq new_eh_seq = eh_seq;
1714	  eh_seq = old_eh_seq;
1715	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
1716	}
1717    }
1718
1719  return this_tf.top_p_seq;
1720}
1721
1722/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
1723   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1724   exception region trees that record all the magic.  */
1725
1726static gimple_seq
1727lower_catch (struct leh_state *state, gimple tp)
1728{
1729  eh_region try_region = NULL;
1730  struct leh_state this_state = *state;
1731  gimple_stmt_iterator gsi;
1732  tree out_label;
1733  gimple_seq new_seq, cleanup;
1734  gimple x;
1735  location_t try_catch_loc = gimple_location (tp);
1736
1737  if (flag_exceptions)
1738    {
1739      try_region = gen_eh_region_try (state->cur_region);
1740      this_state.cur_region = try_region;
1741    }
1742
1743  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1744
1745  if (!eh_region_may_contain_throw (try_region))
1746    return gimple_try_eval (tp);
1747
1748  new_seq = NULL;
1749  emit_eh_dispatch (&new_seq, try_region);
1750  emit_resx (&new_seq, try_region);
1751
1752  this_state.cur_region = state->cur_region;
1753  this_state.ehp_region = try_region;
1754
1755  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1756     itself, so that e.g. for coverage purposes the nested cleanups don't
1757     appear before the cleanup body.  See PR64634 for details.  */
1758  gimple_seq old_eh_seq = eh_seq;
1759  eh_seq = NULL;
1760
1761  out_label = NULL;
1762  cleanup = gimple_try_cleanup (tp);
1763  for (gsi = gsi_start (cleanup);
1764       !gsi_end_p (gsi);
1765       gsi_next (&gsi))
1766    {
1767      eh_catch c;
1768      gimple gcatch;
1769      gimple_seq handler;
1770
1771      gcatch = gsi_stmt (gsi);
1772      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1773
1774      handler = gimple_catch_handler (gcatch);
1775      lower_eh_constructs_1 (&this_state, &handler);
1776
1777      c->label = create_artificial_label (UNKNOWN_LOCATION);
1778      x = gimple_build_label (c->label);
1779      gimple_seq_add_stmt (&new_seq, x);
1780
1781      gimple_seq_add_seq (&new_seq, handler);
1782
1783      if (gimple_seq_may_fallthru (new_seq))
1784	{
1785	  if (!out_label)
1786	    out_label = create_artificial_label (try_catch_loc);
1787
1788	  x = gimple_build_goto (out_label);
1789	  gimple_seq_add_stmt (&new_seq, x);
1790	}
1791      if (!c->type_list)
1792	break;
1793    }
1794
1795  gimple_try_set_cleanup (tp, new_seq);
1796
1797  gimple_seq new_eh_seq = eh_seq;
1798  eh_seq = old_eh_seq;
1799  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1800  gimple_seq_add_seq (&eh_seq, new_eh_seq);
1801  return ret_seq;
1802}
1803
1804/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
1805   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1806   region trees that record all the magic.  */
1807
1808static gimple_seq
1809lower_eh_filter (struct leh_state *state, gimple tp)
1810{
1811  struct leh_state this_state = *state;
1812  eh_region this_region = NULL;
1813  gimple inner, x;
1814  gimple_seq new_seq;
1815
1816  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1817
1818  if (flag_exceptions)
1819    {
1820      this_region = gen_eh_region_allowed (state->cur_region,
1821				           gimple_eh_filter_types (inner));
1822      this_state.cur_region = this_region;
1823    }
1824
1825  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1826
1827  if (!eh_region_may_contain_throw (this_region))
1828    return gimple_try_eval (tp);
1829
1830  new_seq = NULL;
1831  this_state.cur_region = state->cur_region;
1832  this_state.ehp_region = this_region;
1833
1834  emit_eh_dispatch (&new_seq, this_region);
1835  emit_resx (&new_seq, this_region);
1836
1837  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1838  x = gimple_build_label (this_region->u.allowed.label);
1839  gimple_seq_add_stmt (&new_seq, x);
1840
1841  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1842  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1843
1844  gimple_try_set_cleanup (tp, new_seq);
1845
1846  return frob_into_branch_around (tp, this_region, NULL);
1847}
1848
1849/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
1850   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1851   plus the exception region trees that record all the magic.  */
1852
1853static gimple_seq
1854lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1855{
1856  struct leh_state this_state = *state;
1857
1858  if (flag_exceptions)
1859    {
1860      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1861      eh_region this_region;
1862
1863      this_region = gen_eh_region_must_not_throw (state->cur_region);
1864      this_region->u.must_not_throw.failure_decl
1865	= gimple_eh_must_not_throw_fndecl (inner);
1866      this_region->u.must_not_throw.failure_loc
1867	= LOCATION_LOCUS (gimple_location (tp));
1868
1869      /* In order to get mangling applied to this decl, we must mark it
1870	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
1871	 needs to happen.  */
1872      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1873
1874      this_state.cur_region = this_region;
1875    }
1876
1877  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1878
1879  return gimple_try_eval (tp);
1880}
1881
1882/* Implement a cleanup expression.  This is similar to try-finally,
1883   except that we only execute the cleanup block for exception edges.  */
1884
1885static gimple_seq
1886lower_cleanup (struct leh_state *state, gimple tp)
1887{
1888  struct leh_state this_state = *state;
1889  eh_region this_region = NULL;
1890  struct leh_tf_state fake_tf;
1891  gimple_seq result;
1892  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1893
1894  if (flag_exceptions && !cleanup_dead)
1895    {
1896      this_region = gen_eh_region_cleanup (state->cur_region);
1897      this_state.cur_region = this_region;
1898    }
1899
1900  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1901
1902  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1903    return gimple_try_eval (tp);
1904
1905  /* Build enough of a try-finally state so that we can reuse
1906     honor_protect_cleanup_actions.  */
1907  memset (&fake_tf, 0, sizeof (fake_tf));
1908  fake_tf.top_p = fake_tf.try_finally_expr = tp;
1909  fake_tf.outer = state;
1910  fake_tf.region = this_region;
1911  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1912  fake_tf.may_throw = true;
1913
1914  honor_protect_cleanup_actions (state, NULL, &fake_tf);
1915
1916  if (fake_tf.may_throw)
1917    {
1918      /* In this case honor_protect_cleanup_actions had nothing to do,
1919	 and we should process this normally.  */
1920      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1921      result = frob_into_branch_around (tp, this_region,
1922                                        fake_tf.fallthru_label);
1923    }
1924  else
1925    {
1926      /* In this case honor_protect_cleanup_actions did nearly all of
1927	 the work.  All we have left is to append the fallthru_label.  */
1928
1929      result = gimple_try_eval (tp);
1930      if (fake_tf.fallthru_label)
1931	{
1932	  gimple x = gimple_build_label (fake_tf.fallthru_label);
1933	  gimple_seq_add_stmt (&result, x);
1934	}
1935    }
1936  return result;
1937}
1938
1939/* Main loop for lowering eh constructs. Also moves gsi to the next
1940   statement. */
1941
1942static void
1943lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1944{
1945  gimple_seq replace;
1946  gimple x;
1947  gimple stmt = gsi_stmt (*gsi);
1948
1949  switch (gimple_code (stmt))
1950    {
1951    case GIMPLE_CALL:
1952      {
1953	tree fndecl = gimple_call_fndecl (stmt);
1954	tree rhs, lhs;
1955
1956	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1957	  switch (DECL_FUNCTION_CODE (fndecl))
1958	    {
1959	    case BUILT_IN_EH_POINTER:
1960	      /* The front end may have generated a call to
1961		 __builtin_eh_pointer (0) within a catch region.  Replace
1962		 this zero argument with the current catch region number.  */
1963	      if (state->ehp_region)
1964		{
1965		  tree nr = build_int_cst (integer_type_node,
1966					   state->ehp_region->index);
1967		  gimple_call_set_arg (stmt, 0, nr);
1968		}
1969	      else
1970		{
1971		  /* The user has done something silly.  Remove it.  */
1972		  rhs = null_pointer_node;
1973		  goto do_replace;
1974		}
1975	      break;
1976
1977	    case BUILT_IN_EH_FILTER:
1978	      /* ??? This should never appear, but since it's a builtin it
1979		 is accessible to abuse by users.  Just remove it and
1980		 replace the use with the arbitrary value zero.  */
1981	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1982	    do_replace:
1983	      lhs = gimple_call_lhs (stmt);
1984	      x = gimple_build_assign (lhs, rhs);
1985	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
1986	      /* FALLTHRU */
1987
1988	    case BUILT_IN_EH_COPY_VALUES:
1989	      /* Likewise this should not appear.  Remove it.  */
1990	      gsi_remove (gsi, true);
1991	      return;
1992
1993	    default:
1994	      break;
1995	    }
1996      }
1997      /* FALLTHRU */
1998
1999    case GIMPLE_ASSIGN:
2000      /* If the stmt can throw, use a new temporary for the assignment
2001         to the LHS.  This makes sure the old value of the LHS is
2002	 available on the EH edge.  Only do so for statements that
2003	 potentially fall through (no noreturn calls e.g.), otherwise
2004	 this new assignment might create fake fallthru regions.  */
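      /* Illustrative example (hypothetical GIMPLE, not verbatim output):
	 a throwing assignment

	     lhs = b / c;

	 is rewritten by the code below as

	     tmp = b / c;
	     lhs = tmp;

	 so only the first statement stays in the EH region and the old
	 value of LHS is still available on the EH edge.  */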
2005      if (stmt_could_throw_p (stmt)
2006	  && gimple_has_lhs (stmt)
2007	  && gimple_stmt_may_fallthru (stmt)
2008	  && !tree_could_throw_p (gimple_get_lhs (stmt))
2009	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2010	{
2011	  tree lhs = gimple_get_lhs (stmt);
2012	  tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
2013	  gimple s = gimple_build_assign (lhs, tmp);
2014	  gimple_set_location (s, gimple_location (stmt));
2015	  gimple_set_block (s, gimple_block (stmt));
2016	  gimple_set_lhs (stmt, tmp);
2017	  if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2018	      || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2019	    DECL_GIMPLE_REG_P (tmp) = 1;
2020	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
2021	}
2022      /* Look for things that can throw exceptions, and record them.  */
2023      if (state->cur_region && stmt_could_throw_p (stmt))
2024	{
2025	  record_stmt_eh_region (state->cur_region, stmt);
2026	  note_eh_region_may_contain_throw (state->cur_region);
2027	}
2028      break;
2029
2030    case GIMPLE_COND:
2031    case GIMPLE_GOTO:
2032    case GIMPLE_RETURN:
2033      maybe_record_in_goto_queue (state, stmt);
2034      break;
2035
2036    case GIMPLE_SWITCH:
2037      verify_norecord_switch_expr (state, stmt);
2038      break;
2039
2040    case GIMPLE_TRY:
2041      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2042	replace = lower_try_finally (state, stmt);
2043      else
2044	{
2045	  x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2046	  if (!x)
2047	    {
2048	      replace = gimple_try_eval (stmt);
2049	      lower_eh_constructs_1 (state, &replace);
2050	    }
2051	  else
2052	    switch (gimple_code (x))
2053	      {
2054		case GIMPLE_CATCH:
2055		    replace = lower_catch (state, stmt);
2056		    break;
2057		case GIMPLE_EH_FILTER:
2058		    replace = lower_eh_filter (state, stmt);
2059		    break;
2060		case GIMPLE_EH_MUST_NOT_THROW:
2061		    replace = lower_eh_must_not_throw (state, stmt);
2062		    break;
2063		case GIMPLE_EH_ELSE:
2064		    /* This code is only valid with GIMPLE_TRY_FINALLY.  */
2065		    gcc_unreachable ();
2066		default:
2067		    replace = lower_cleanup (state, stmt);
2068		    break;
2069	      }
2070	}
2071
2072      /* Remove the old stmt and insert the transformed sequence
2073	 instead. */
2074      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2075      gsi_remove (gsi, true);
2076
2077      /* Return since we don't want gsi_next ().  */
2078      return;
2079
2080    case GIMPLE_EH_ELSE:
2081      /* We should be eliminating this in lower_try_finally et al.  */
2082      gcc_unreachable ();
2083
2084    default:
2085      /* A type, a decl, or some kind of statement that we're not
2086	 interested in.  Don't walk them.  */
2087      break;
2088    }
2089
2090  gsi_next (gsi);
2091}
2092
2093/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2094
2095static void
2096lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2097{
2098  gimple_stmt_iterator gsi;
2099  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2100    lower_eh_constructs_2 (state, &gsi);
2101}
2102
2103static unsigned int
2104lower_eh_constructs (void)
2105{
2106  struct leh_state null_state;
2107  gimple_seq bodyp;
2108
2109  bodyp = gimple_body (current_function_decl);
2110  if (bodyp == NULL)
2111    return 0;
2112
2113  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
2114  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2115  memset (&null_state, 0, sizeof (null_state));
2116
2117  collect_finally_tree_1 (bodyp, NULL);
2118  lower_eh_constructs_1 (&null_state, &bodyp);
2119  gimple_set_body (current_function_decl, bodyp);
2120
2121  /* We assume there's a return statement, or something, at the end of
2122     the function, and thus plopping the EH sequence afterward won't
2123     change anything.  */
2124  gcc_assert (!gimple_seq_may_fallthru (bodyp));
2125  gimple_seq_add_seq (&bodyp, eh_seq);
2126
2127  /* We assume that since BODYP already existed, adding EH_SEQ to it
2128     didn't change its value, and we don't have to re-set the function.  */
2129  gcc_assert (bodyp == gimple_body (current_function_decl));
2130
2131  htab_delete (finally_tree);
2132  BITMAP_FREE (eh_region_may_contain_throw_map);
2133  eh_seq = NULL;
2134
2135  /* If this function needs a language specific EH personality routine
2136     and the frontend didn't already set one, do so now.  */
2137  if (function_needs_eh_personality (cfun) == eh_personality_lang
2138      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2139    DECL_FUNCTION_PERSONALITY (current_function_decl)
2140      = lang_hooks.eh_personality ();
2141
2142  return 0;
2143}
2144
2145struct gimple_opt_pass pass_lower_eh =
2146{
2147 {
2148  GIMPLE_PASS,
2149  "eh",					/* name */
2150  OPTGROUP_NONE,                        /* optinfo_flags */
2151  NULL,					/* gate */
2152  lower_eh_constructs,			/* execute */
2153  NULL,					/* sub */
2154  NULL,					/* next */
2155  0,					/* static_pass_number */
2156  TV_TREE_EH,				/* tv_id */
2157  PROP_gimple_lcf,			/* properties_required */
2158  PROP_gimple_leh,			/* properties_provided */
2159  0,					/* properties_destroyed */
2160  0,					/* todo_flags_start */
2161  0             			/* todo_flags_finish */
2162 }
2163};
2164
2165/* Create the multiple edges from an EH_DISPATCH statement to all of
2166   the possible handlers for its EH region.  Return true if there's
2167   no fallthru edge; false if there is.  */
2168
2169bool
2170make_eh_dispatch_edges (gimple stmt)
2171{
2172  eh_region r;
2173  eh_catch c;
2174  basic_block src, dst;
2175
2176  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2177  src = gimple_bb (stmt);
2178
2179  switch (r->type)
2180    {
2181    case ERT_TRY:
2182      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2183	{
2184	  dst = label_to_block (c->label);
2185	  make_edge (src, dst, 0);
2186
2187	  /* A catch-all handler doesn't have a fallthru.  */
2188	  if (c->type_list == NULL)
2189	    return false;
2190	}
2191      break;
2192
2193    case ERT_ALLOWED_EXCEPTIONS:
2194      dst = label_to_block (r->u.allowed.label);
2195      make_edge (src, dst, 0);
2196      break;
2197
2198    default:
2199      gcc_unreachable ();
2200    }
2201
2202  return true;
2203}
2204
2205/* Create the single EH edge from STMT to its nearest landing pad,
2206   if there is such a landing pad within the current function.  */
2207
2208void
2209make_eh_edges (gimple stmt)
2210{
2211  basic_block src, dst;
2212  eh_landing_pad lp;
2213  int lp_nr;
2214
2215  lp_nr = lookup_stmt_eh_lp (stmt);
2216  if (lp_nr <= 0)
2217    return;
2218
2219  lp = get_eh_landing_pad_from_number (lp_nr);
2220  gcc_assert (lp != NULL);
2221
2222  src = gimple_bb (stmt);
2223  dst = label_to_block (lp->post_landing_pad);
2224  make_edge (src, dst, EDGE_EH);
2225}
2226
2227/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2228   do not actually perform the final edge redirection.
2229
2230   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2231   we intend to change the destination EH region as well; this means
2232   EH_LANDING_PAD_NR must already be set on the destination block label.
2233   If false, we're being called from generic cfg manipulation code and we
2234   should preserve our place within the region tree.  */
2235
2236static void
2237redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2238{
2239  eh_landing_pad old_lp, new_lp;
2240  basic_block old_bb;
2241  gimple throw_stmt;
2242  int old_lp_nr, new_lp_nr;
2243  tree old_label, new_label;
2244  edge_iterator ei;
2245  edge e;
2246
2247  old_bb = edge_in->dest;
2248  old_label = gimple_block_label (old_bb);
2249  old_lp_nr = EH_LANDING_PAD_NR (old_label);
2250  gcc_assert (old_lp_nr > 0);
2251  old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2252
2253  throw_stmt = last_stmt (edge_in->src);
2254  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2255
2256  new_label = gimple_block_label (new_bb);
2257
2258  /* Look for an existing region that might be using NEW_BB already.  */
2259  new_lp_nr = EH_LANDING_PAD_NR (new_label);
2260  if (new_lp_nr)
2261    {
2262      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2263      gcc_assert (new_lp);
2264
2265      /* Unless CHANGE_REGION is true, the new and old landing pad
2266	 had better be associated with the same EH region.  */
2267      gcc_assert (change_region || new_lp->region == old_lp->region);
2268    }
2269  else
2270    {
2271      new_lp = NULL;
2272      gcc_assert (!change_region);
2273    }
2274
2275  /* Notice when we redirect the last EH edge away from OLD_BB.  */
2276  FOR_EACH_EDGE (e, ei, old_bb->preds)
2277    if (e != edge_in && (e->flags & EDGE_EH))
2278      break;
2279
2280  if (new_lp)
2281    {
2282      /* NEW_LP already exists.  If there are still edges into OLD_LP,
2283	 there's nothing to do with the EH tree.  If there are no more
2284	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2285	 If CHANGE_REGION is true, then our caller is expecting to remove
2286	 the landing pad.  */
2287      if (e == NULL && !change_region)
2288	remove_eh_landing_pad (old_lp);
2289    }
2290  else
2291    {
2292      /* No correct landing pad exists.  If there are no more edges
2293	 into OLD_LP, then we can simply re-use the existing landing pad.
2294	 Otherwise, we have to create a new landing pad.  */
2295      if (e == NULL)
2296	{
2297	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2298	  new_lp = old_lp;
2299	}
2300      else
2301	new_lp = gen_eh_landing_pad (old_lp->region);
2302      new_lp->post_landing_pad = new_label;
2303      EH_LANDING_PAD_NR (new_label) = new_lp->index;
2304    }
2305
2306  /* Maybe move the throwing statement to the new region.  */
2307  if (old_lp != new_lp)
2308    {
2309      remove_stmt_from_eh_lp (throw_stmt);
2310      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2311    }
2312}
2313
2314/* Redirect EH edge E to NEW_BB.  */
2315
2316edge
2317redirect_eh_edge (edge edge_in, basic_block new_bb)
2318{
2319  redirect_eh_edge_1 (edge_in, new_bb, false);
2320  return ssa_redirect_edge (edge_in, new_bb);
2321}
2322
2323/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
2324   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2325   The actual edge update will happen in the caller.  */
2326
2327void
2328redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2329{
2330  tree new_lab = gimple_block_label (new_bb);
2331  bool any_changed = false;
2332  basic_block old_bb;
2333  eh_region r;
2334  eh_catch c;
2335
2336  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2337  switch (r->type)
2338    {
2339    case ERT_TRY:
2340      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2341	{
2342	  old_bb = label_to_block (c->label);
2343	  if (old_bb == e->dest)
2344	    {
2345	      c->label = new_lab;
2346	      any_changed = true;
2347	    }
2348	}
2349      break;
2350
2351    case ERT_ALLOWED_EXCEPTIONS:
2352      old_bb = label_to_block (r->u.allowed.label);
2353      gcc_assert (old_bb == e->dest);
2354      r->u.allowed.label = new_lab;
2355      any_changed = true;
2356      break;
2357
2358    default:
2359      gcc_unreachable ();
2360    }
2361
2362  gcc_assert (any_changed);
2363}
2364
2365/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */
2366
2367bool
2368operation_could_trap_helper_p (enum tree_code op,
2369			       bool fp_operation,
2370			       bool honor_trapv,
2371			       bool honor_nans,
2372			       bool honor_snans,
2373			       tree divisor,
2374			       bool *handled)
2375{
2376  *handled = true;
2377  switch (op)
2378    {
2379    case TRUNC_DIV_EXPR:
2380    case CEIL_DIV_EXPR:
2381    case FLOOR_DIV_EXPR:
2382    case ROUND_DIV_EXPR:
2383    case EXACT_DIV_EXPR:
2384    case CEIL_MOD_EXPR:
2385    case FLOOR_MOD_EXPR:
2386    case ROUND_MOD_EXPR:
2387    case TRUNC_MOD_EXPR:
2388    case RDIV_EXPR:
2389      if (honor_snans || honor_trapv)
2390	return true;
2391      if (fp_operation)
2392	return flag_trapping_math;
2393      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2394        return true;
2395      return false;
2396
2397    case LT_EXPR:
2398    case LE_EXPR:
2399    case GT_EXPR:
2400    case GE_EXPR:
2401    case LTGT_EXPR:
2402      /* Some floating point comparisons may trap.  */
2403      return honor_nans;
2404
2405    case EQ_EXPR:
2406    case NE_EXPR:
2407    case UNORDERED_EXPR:
2408    case ORDERED_EXPR:
2409    case UNLT_EXPR:
2410    case UNLE_EXPR:
2411    case UNGT_EXPR:
2412    case UNGE_EXPR:
2413    case UNEQ_EXPR:
2414      return honor_snans;
2415
2416    case CONVERT_EXPR:
2417    case FIX_TRUNC_EXPR:
2418      /* Conversion of floating point might trap.  */
2419      return honor_nans;
2420
2421    case NEGATE_EXPR:
2422    case ABS_EXPR:
2423    case CONJ_EXPR:
2424      /* These operations don't trap with floating point.  */
2425      if (honor_trapv)
2426	return true;
2427      return false;
2428
2429    case PLUS_EXPR:
2430    case MINUS_EXPR:
2431    case MULT_EXPR:
2432      /* Any floating arithmetic may trap.  */
2433      if (fp_operation && flag_trapping_math)
2434	return true;
2435      if (honor_trapv)
2436	return true;
2437      return false;
2438
2439    case COMPLEX_EXPR:
2440    case CONSTRUCTOR:
2441      /* Constructing an object cannot trap.  */
2442      return false;
2443
2444    default:
2445      /* Any floating arithmetic may trap.  */
2446      if (fp_operation && flag_trapping_math)
2447	return true;
2448
2449      *handled = false;
2450      return false;
2451    }
2452}
2453
2454/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
2455   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
2456   type operands that may trap.  If OP is a division operator, DIVISOR contains
2457   the value of the divisor.  */
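
/* Illustrative example (hypothetical query, assuming default flags):

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)

   returns true because the divisor is a constant zero, while the same query
   with a nonzero constant divisor returns false when neither -ftrapv nor
   signaling NaNs are in effect.  */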
2458
2459bool
2460operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2461			tree divisor)
2462{
2463  bool honor_nans = (fp_operation && flag_trapping_math
2464		     && !flag_finite_math_only);
2465  bool honor_snans = fp_operation && flag_signaling_nans != 0;
2466  bool handled;
2467
2468  if (TREE_CODE_CLASS (op) != tcc_comparison
2469      && TREE_CODE_CLASS (op) != tcc_unary
2470      && TREE_CODE_CLASS (op) != tcc_binary)
2471    return false;
2472
2473  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2474					honor_nans, honor_snans, divisor,
2475					&handled);
2476}
2477
2478/* Return true if EXPR can trap, as in dereferencing an invalid pointer
2479   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
2480   This routine expects only GIMPLE lhs or rhs input.  */
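
/* Illustrative examples (assuming this summary matches the cases below):
   an ARRAY_REF with a non-constant index is considered trapping because
   in_array_bounds_p cannot prove the index in range, a MEM_REF through an
   arbitrary pointer traps unless TREE_THIS_NOTRAP is set, and a plain
   access to a non-weak decl does not trap.  */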
2481
2482bool
2483tree_could_trap_p (tree expr)
2484{
2485  enum tree_code code;
2486  bool fp_operation = false;
2487  bool honor_trapv = false;
2488  tree t, base, div = NULL_TREE;
2489
2490  if (!expr)
2491    return false;
2492
2493  code = TREE_CODE (expr);
2494  t = TREE_TYPE (expr);
2495
2496  if (t)
2497    {
2498      if (COMPARISON_CLASS_P (expr))
2499	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2500      else
2501	fp_operation = FLOAT_TYPE_P (t);
2502      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2503    }
2504
2505  if (TREE_CODE_CLASS (code) == tcc_binary)
2506    div = TREE_OPERAND (expr, 1);
2507  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2508    return true;
2509
2510 restart:
2511  switch (code)
2512    {
2513    case COMPONENT_REF:
2514    case REALPART_EXPR:
2515    case IMAGPART_EXPR:
2516    case BIT_FIELD_REF:
2517    case VIEW_CONVERT_EXPR:
2518    case WITH_SIZE_EXPR:
2519      expr = TREE_OPERAND (expr, 0);
2520      code = TREE_CODE (expr);
2521      goto restart;
2522
2523    case ARRAY_RANGE_REF:
2524      base = TREE_OPERAND (expr, 0);
2525      if (tree_could_trap_p (base))
2526	return true;
2527      if (TREE_THIS_NOTRAP (expr))
2528	return false;
2529      return !range_in_array_bounds_p (expr);
2530
2531    case ARRAY_REF:
2532      base = TREE_OPERAND (expr, 0);
2533      if (tree_could_trap_p (base))
2534	return true;
2535      if (TREE_THIS_NOTRAP (expr))
2536	return false;
2537      return !in_array_bounds_p (expr);
2538
2539    case TARGET_MEM_REF:
2540    case MEM_REF:
2541      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2542	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2543	return true;
2544      if (TREE_THIS_NOTRAP (expr))
2545	return false;
2546      /* We cannot prove that the access is in-bounds when we have
2547         variable-index TARGET_MEM_REFs.  */
2548      if (code == TARGET_MEM_REF
2549	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2550	return true;
2551      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2552	{
2553	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2554	  double_int off = mem_ref_offset (expr);
2555	  if (off.is_negative ())
2556	    return true;
2557	  if (TREE_CODE (base) == STRING_CST)
2558	    return double_int::from_uhwi (TREE_STRING_LENGTH (base)).ule (off);
2559	  else if (DECL_SIZE_UNIT (base) == NULL_TREE
2560		   || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2561		   || tree_to_double_int (DECL_SIZE_UNIT (base)).ule (off))
2562	    return true;
2563	  /* Now we are sure the first byte of the access is inside
2564	     the object.  */
2565	  return false;
2566	}
2567      return true;
2568
2569    case INDIRECT_REF:
2570      return !TREE_THIS_NOTRAP (expr);
2571
2572    case ASM_EXPR:
2573      return TREE_THIS_VOLATILE (expr);
2574
2575    case CALL_EXPR:
2576      t = get_callee_fndecl (expr);
2577      /* Assume that calls to weak functions may trap.  */
2578      if (!t || !DECL_P (t))
2579	return true;
2580      if (DECL_WEAK (t))
2581	return tree_could_trap_p (t);
2582      return false;
2583
2584    case FUNCTION_DECL:
2585      /* Assume that accesses to weak functions may trap, unless we know
2586	 they are certainly defined in the current TU or in some other
2587	 LTO partition.  */
2588      if (DECL_WEAK (expr))
2589	{
2590	  struct cgraph_node *node;
2591	  if (!DECL_EXTERNAL (expr))
2592	    return false;
2593	  node = cgraph_function_node (cgraph_get_node (expr), NULL);
2594	  if (node && node->symbol.in_other_partition)
2595	    return false;
2596	  return true;
2597	}
2598      return false;
2599
2600    case VAR_DECL:
2601      /* Assume that accesses to weak vars may trap, unless we know
2602	 they are certainly defined in the current TU or in some other
2603	 LTO partition.  */
2604      if (DECL_WEAK (expr))
2605	{
2606	  struct varpool_node *node;
2607	  if (!DECL_EXTERNAL (expr))
2608	    return false;
2609	  node = varpool_variable_node (varpool_get_node (expr), NULL);
2610	  if (node && node->symbol.in_other_partition)
2611	    return false;
2612	  return true;
2613	}
2614      return false;
2615
2616    default:
2617      return false;
2618    }
2619}
2620
2621
2622/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
2623   an assignment or a conditional) may throw.  */
2624
2625static bool
2626stmt_could_throw_1_p (gimple stmt)
2627{
2628  enum tree_code code = gimple_expr_code (stmt);
2629  bool honor_nans = false;
2630  bool honor_snans = false;
2631  bool fp_operation = false;
2632  bool honor_trapv = false;
2633  tree t;
2634  size_t i;
2635  bool handled, ret;
2636
2637  if (TREE_CODE_CLASS (code) == tcc_comparison
2638      || TREE_CODE_CLASS (code) == tcc_unary
2639      || TREE_CODE_CLASS (code) == tcc_binary)
2640    {
2641      if (is_gimple_assign (stmt)
2642	  && TREE_CODE_CLASS (code) == tcc_comparison)
2643	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2644      else if (gimple_code (stmt) == GIMPLE_COND)
2645	t = TREE_TYPE (gimple_cond_lhs (stmt));
2646      else
2647	t = gimple_expr_type (stmt);
2648      fp_operation = FLOAT_TYPE_P (t);
2649      if (fp_operation)
2650	{
2651	  honor_nans = flag_trapping_math && !flag_finite_math_only;
2652	  honor_snans = flag_signaling_nans != 0;
2653	}
2654      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2655	honor_trapv = true;
2656    }
2657
2658  /* Check if the main expression may trap.  */
2659  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2660  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2661				       honor_nans, honor_snans, t,
2662				       &handled);
2663  if (handled)
2664    return ret;
2665
2666  /* If the expression does not trap, see if any of the individual operands may
2667     trap.  */
2668  for (i = 0; i < gimple_num_ops (stmt); i++)
2669    if (tree_could_trap_p (gimple_op (stmt, i)))
2670      return true;
2671
2672  return false;
2673}
2674
2675
2676/* Return true if statement STMT could throw an exception.  */
2677
2678bool
2679stmt_could_throw_p (gimple stmt)
2680{
2681  if (!flag_exceptions)
2682    return false;
2683
2684  /* The only statements that can throw an exception are assignments,
2685     conditionals, calls, resx, and asms.  */
2686  switch (gimple_code (stmt))
2687    {
2688    case GIMPLE_RESX:
2689      return true;
2690
2691    case GIMPLE_CALL:
2692      return !gimple_call_nothrow_p (stmt);
2693
2694    case GIMPLE_ASSIGN:
2695    case GIMPLE_COND:
2696      if (!cfun->can_throw_non_call_exceptions)
2697        return false;
2698      return stmt_could_throw_1_p (stmt);
2699
2700    case GIMPLE_ASM:
2701      if (!cfun->can_throw_non_call_exceptions)
2702        return false;
2703      return gimple_asm_volatile_p (stmt);
2704
2705    default:
2706      return false;
2707    }
2708}
2709
2710
2711/* Return true if expression T could throw an exception.  */
2712
2713bool
2714tree_could_throw_p (tree t)
2715{
2716  if (!flag_exceptions)
2717    return false;
2718  if (TREE_CODE (t) == MODIFY_EXPR)
2719    {
2720      if (cfun->can_throw_non_call_exceptions
2721          && tree_could_trap_p (TREE_OPERAND (t, 0)))
2722        return true;
2723      t = TREE_OPERAND (t, 1);
2724    }
2725
2726  if (TREE_CODE (t) == WITH_SIZE_EXPR)
2727    t = TREE_OPERAND (t, 0);
2728  if (TREE_CODE (t) == CALL_EXPR)
2729    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2730  if (cfun->can_throw_non_call_exceptions)
2731    return tree_could_trap_p (t);
2732  return false;
2733}
2734
2735/* Return true if STMT can throw an exception that is not caught within
2736   the current function (CFUN).  */
2737
2738bool
2739stmt_can_throw_external (gimple stmt)
2740{
2741  int lp_nr;
2742
2743  if (!stmt_could_throw_p (stmt))
2744    return false;
2745
2746  lp_nr = lookup_stmt_eh_lp (stmt);
2747  return lp_nr == 0;
2748}
2749
2750/* Return true if STMT can throw an exception that is caught within
2751   the current function (CFUN).  */
2752
2753bool
2754stmt_can_throw_internal (gimple stmt)
2755{
2756  int lp_nr;
2757
2758  if (!stmt_could_throw_p (stmt))
2759    return false;
2760
2761  lp_nr = lookup_stmt_eh_lp (stmt);
2762  return lp_nr > 0;
2763}
2764
2765/* Given a statement STMT in IFUN, if STMT can no longer throw, then
2766   remove any entry it might have from the EH table.  Return true if
2767   any change was made.  */
2768
2769bool
2770maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2771{
2772  if (stmt_could_throw_p (stmt))
2773    return false;
2774  return remove_stmt_from_eh_lp_fn (ifun, stmt);
2775}
2776
2777/* Likewise, but always use the current function.  */
2778
2779bool
2780maybe_clean_eh_stmt (gimple stmt)
2781{
2782  return maybe_clean_eh_stmt_fn (cfun, stmt);
2783}
2784
2785/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2786   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2787   in the table if it should be in there.  Return TRUE if a replacement was
2788   done that may require an EH edge purge.  */
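
/* Usage sketch (hypothetical caller, not from these sources): callers that
   fold or replace a statement typically do something like

     if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
       gimple_purge_dead_eh_edges (gimple_bb (stmt));

   so that an EH edge which can no longer be taken is removed from the CFG.  */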
2789
2790bool
2791maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2792{
2793  int lp_nr = lookup_stmt_eh_lp (old_stmt);
2794
2795  if (lp_nr != 0)
2796    {
2797      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2798
2799      if (new_stmt == old_stmt && new_stmt_could_throw)
2800	return false;
2801
2802      remove_stmt_from_eh_lp (old_stmt);
2803      if (new_stmt_could_throw)
2804	{
2805	  add_stmt_to_eh_lp (new_stmt, lp_nr);
2806	  return false;
2807	}
2808      else
2809	return true;
2810    }
2811
2812  return false;
2813}
2814
2815/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2816   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
2817   operand is the return value of duplicate_eh_regions.  */
2818
2819bool
2820maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2821			    struct function *old_fun, gimple old_stmt,
2822			    struct pointer_map_t *map, int default_lp_nr)
2823{
2824  int old_lp_nr, new_lp_nr;
2825  void **slot;
2826
2827  if (!stmt_could_throw_p (new_stmt))
2828    return false;
2829
2830  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2831  if (old_lp_nr == 0)
2832    {
2833      if (default_lp_nr == 0)
2834	return false;
2835      new_lp_nr = default_lp_nr;
2836    }
2837  else if (old_lp_nr > 0)
2838    {
2839      eh_landing_pad old_lp, new_lp;
2840
2841      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2842      slot = pointer_map_contains (map, old_lp);
2843      new_lp = (eh_landing_pad) *slot;
2844      new_lp_nr = new_lp->index;
2845    }
2846  else
2847    {
2848      eh_region old_r, new_r;
2849
2850      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2851      slot = pointer_map_contains (map, old_r);
2852      new_r = (eh_region) *slot;
2853      new_lp_nr = -new_r->index;
2854    }
2855
2856  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2857  return true;
2858}
2859
2860/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2861   and thus no remapping is required.  */
2862
2863bool
2864maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2865{
2866  int lp_nr;
2867
2868  if (!stmt_could_throw_p (new_stmt))
2869    return false;
2870
2871  lp_nr = lookup_stmt_eh_lp (old_stmt);
2872  if (lp_nr == 0)
2873    return false;
2874
2875  add_stmt_to_eh_lp (new_stmt, lp_nr);
2876  return true;
2877}
2878
2879/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2880   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
2881   this only handles handlers consisting of a single call, as that's the
2882   important case for C++: a destructor call for a particular object showing
2883   up in multiple handlers.  */
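
/* Illustrative example (hypothetical handlers): two cleanup sequences that
   each consist solely of the call

     Base::~Base (&b);

   compare equal here, whereas handlers whose single call has an LHS, a
   static chain, a different callee or different arguments do not.  */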
2884
2885static bool
2886same_handler_p (gimple_seq oneh, gimple_seq twoh)
2887{
2888  gimple_stmt_iterator gsi;
2889  gimple ones, twos;
2890  unsigned int ai;
2891
2892  gsi = gsi_start (oneh);
2893  if (!gsi_one_before_end_p (gsi))
2894    return false;
2895  ones = gsi_stmt (gsi);
2896
2897  gsi = gsi_start (twoh);
2898  if (!gsi_one_before_end_p (gsi))
2899    return false;
2900  twos = gsi_stmt (gsi);
2901
2902  if (!is_gimple_call (ones)
2903      || !is_gimple_call (twos)
2904      || gimple_call_lhs (ones)
2905      || gimple_call_lhs (twos)
2906      || gimple_call_chain (ones)
2907      || gimple_call_chain (twos)
2908      || !gimple_call_same_target_p (ones, twos)
2909      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2910    return false;
2911
2912  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2913    if (!operand_equal_p (gimple_call_arg (ones, ai),
2914                          gimple_call_arg (twos, ai), 0))
2915      return false;
2916
2917  return true;
2918}
2919
2920/* Optimize
2921    try { A() } finally { try { ~B() } catch { ~A() } }
2922    try { ... } finally { ~A() }
2923   into
2924    try { A() } catch { ~B() }
2925    try { ~B() ... } finally { ~A() }
2926
2927   This occurs frequently in C++, where A is a local variable and B is a
2928   temporary used in the initializer for A.  */
2929
2930static void
2931optimize_double_finally (gimple one, gimple two)
2932{
2933  gimple oneh;
2934  gimple_stmt_iterator gsi;
2935  gimple_seq cleanup;
2936
2937  cleanup = gimple_try_cleanup (one);
2938  gsi = gsi_start (cleanup);
2939  if (!gsi_one_before_end_p (gsi))
2940    return;
2941
2942  oneh = gsi_stmt (gsi);
2943  if (gimple_code (oneh) != GIMPLE_TRY
2944      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2945    return;
2946
2947  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2948    {
2949      gimple_seq seq = gimple_try_eval (oneh);
2950
2951      gimple_try_set_cleanup (one, seq);
2952      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2953      seq = copy_gimple_seq_and_replace_locals (seq);
2954      gimple_seq_add_seq (&seq, gimple_try_eval (two));
2955      gimple_try_set_eval (two, seq);
2956    }
2957}
2958
2959/* Perform EH refactoring optimizations that are simpler to do when code
2960   flow has been lowered but EH structures haven't.  */
2961
2962static void
2963refactor_eh_r (gimple_seq seq)
2964{
2965  gimple_stmt_iterator gsi;
2966  gimple one, two;
2967
2968  one = NULL;
2969  two = NULL;
2970  gsi = gsi_start (seq);
2971  while (1)
2972    {
2973      one = two;
2974      if (gsi_end_p (gsi))
2975	two = NULL;
2976      else
2977	two = gsi_stmt (gsi);
2978      if (one
2979	  && two
2980	  && gimple_code (one) == GIMPLE_TRY
2981	  && gimple_code (two) == GIMPLE_TRY
2982	  && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2983	  && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2984	optimize_double_finally (one, two);
2985      if (one)
2986	switch (gimple_code (one))
2987	  {
2988	  case GIMPLE_TRY:
2989	    refactor_eh_r (gimple_try_eval (one));
2990	    refactor_eh_r (gimple_try_cleanup (one));
2991	    break;
2992	  case GIMPLE_CATCH:
2993	    refactor_eh_r (gimple_catch_handler (one));
2994	    break;
2995	  case GIMPLE_EH_FILTER:
2996	    refactor_eh_r (gimple_eh_filter_failure (one));
2997	    break;
2998	  case GIMPLE_EH_ELSE:
2999	    refactor_eh_r (gimple_eh_else_n_body (one));
3000	    refactor_eh_r (gimple_eh_else_e_body (one));
3001	    break;
3002	  default:
3003	    break;
3004	  }
3005      if (two)
3006	gsi_next (&gsi);
3007      else
3008	break;
3009    }
3010}
3011
3012static unsigned
3013refactor_eh (void)
3014{
3015  refactor_eh_r (gimple_body (current_function_decl));
3016  return 0;
3017}
3018
3019static bool
3020gate_refactor_eh (void)
3021{
3022  return flag_exceptions != 0;
3023}
3024
3025struct gimple_opt_pass pass_refactor_eh =
3026{
3027 {
3028  GIMPLE_PASS,
3029  "ehopt",				/* name */
3030  OPTGROUP_NONE,                        /* optinfo_flags */
3031  gate_refactor_eh,			/* gate */
3032  refactor_eh,				/* execute */
3033  NULL,					/* sub */
3034  NULL,					/* next */
3035  0,					/* static_pass_number */
3036  TV_TREE_EH,				/* tv_id */
3037  PROP_gimple_lcf,			/* properties_required */
3038  0,					/* properties_provided */
3039  0,					/* properties_destroyed */
3040  0,					/* todo_flags_start */
3041  0             			/* todo_flags_finish */
3042 }
3043};
3044
3045/* At the end of gimple optimization, we can lower RESX.  */
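
/* Overview sketch (assuming this summary matches the cases below): a
   GIMPLE_RESX is rewritten in one of three ways.  If its source region has
   already been deleted, the resx becomes a __builtin_trap call.  If it
   resumes into another region of this function, it becomes either a call
   to that region's MUST_NOT_THROW failure routine or a
   __builtin_eh_copy_values call followed by a fallthru edge to just past
   the target landing pad.  Otherwise the exception escapes the function
   and the resx becomes a call to _Unwind_Resume (__cxa_end_cleanup on ARM
   EABI targets).  */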
3046
3047static bool
3048lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
3049{
3050  int lp_nr;
3051  eh_region src_r, dst_r;
3052  gimple_stmt_iterator gsi;
3053  gimple x;
3054  tree fn, src_nr;
3055  bool ret = false;
3056
3057  lp_nr = lookup_stmt_eh_lp (stmt);
3058  if (lp_nr != 0)
3059    dst_r = get_eh_region_from_lp_number (lp_nr);
3060  else
3061    dst_r = NULL;
3062
3063  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3064  gsi = gsi_last_bb (bb);
3065
3066  if (src_r == NULL)
3067    {
3068      /* We can wind up with no source region when pass_cleanup_eh shows
3069	 that there are no entries into an eh region and deletes it, but
3070	 then the block that contains the resx isn't removed.  This can
3071	 happen without optimization when the switch statement created by
3072	 lower_try_finally_switch isn't simplified to remove the eh case.
3073
3074	 Resolve this by expanding the resx node to an abort.  */
3075
3076      fn = builtin_decl_implicit (BUILT_IN_TRAP);
3077      x = gimple_build_call (fn, 0);
3078      gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3079
3080      while (EDGE_COUNT (bb->succs) > 0)
3081	remove_edge (EDGE_SUCC (bb, 0));
3082    }
3083  else if (dst_r)
3084    {
3085      /* When we have a destination region, we resolve this by copying
3086	 the excptr and filter values into place, and changing the edge
3087	 to immediately after the landing pad.  */
3088      edge e;
3089
3090      if (lp_nr < 0)
3091	{
3092	  basic_block new_bb;
3093	  void **slot;
3094	  tree lab;
3095
3096	  /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
3097	     the failure decl into a new block, if needed.  */
3098	  gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3099
3100	  slot = pointer_map_contains (mnt_map, dst_r);
3101	  if (slot == NULL)
3102	    {
3103	      gimple_stmt_iterator gsi2;
3104
3105	      new_bb = create_empty_bb (bb);
3106	      if (current_loops)
3107		add_bb_to_loop (new_bb, bb->loop_father);
3108	      lab = gimple_block_label (new_bb);
3109	      gsi2 = gsi_start_bb (new_bb);
3110
3111	      fn = dst_r->u.must_not_throw.failure_decl;
3112	      x = gimple_build_call (fn, 0);
3113	      gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3114	      gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3115
3116	      slot = pointer_map_insert (mnt_map, dst_r);
3117	      *slot = lab;
3118	    }
3119	  else
3120	    {
3121	      lab = (tree) *slot;
3122	      new_bb = label_to_block (lab);
3123	    }
3124
3125	  gcc_assert (EDGE_COUNT (bb->succs) == 0);
3126	  e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3127	  e->count = bb->count;
3128	  e->probability = REG_BR_PROB_BASE;
3129	}
3130      else
3131	{
3132	  edge_iterator ei;
3133	  tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3134
3135	  fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3136	  src_nr = build_int_cst (integer_type_node, src_r->index);
3137	  x = gimple_build_call (fn, 2, dst_nr, src_nr);
3138	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3139
3140	  /* Update the flags for the outgoing edge.  */
3141	  e = single_succ_edge (bb);
3142	  gcc_assert (e->flags & EDGE_EH);
3143	  e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3144
3145	  /* If there are no more EH users of the landing pad, delete it.  */
3146	  FOR_EACH_EDGE (e, ei, e->dest->preds)
3147	    if (e->flags & EDGE_EH)
3148	      break;
3149	  if (e == NULL)
3150	    {
3151	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3152	      remove_eh_landing_pad (lp);
3153	    }
3154	}
3155
3156      ret = true;
3157    }
3158  else
3159    {
3160      tree var;
3161
3162      /* When we don't have a destination region, this exception escapes
3163	 up the call chain.  We resolve this by generating a call to the
3164	 _Unwind_Resume library function.  */
3165
3166      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3167	 with no arguments for C++ and Java.  Check for that.  */
3168      if (src_r->use_cxa_end_cleanup)
3169	{
3170	  fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3171	  x = gimple_build_call (fn, 0);
3172	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3173	}
3174      else
3175	{
3176	  fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3177	  src_nr = build_int_cst (integer_type_node, src_r->index);
3178	  x = gimple_build_call (fn, 1, src_nr);
3179	  var = create_tmp_var (ptr_type_node, NULL);
3180	  var = make_ssa_name (var, x);
3181	  gimple_call_set_lhs (x, var);
3182	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3183
3184	  fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3185	  x = gimple_build_call (fn, 1, var);
3186	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3187	}
3188
3189      gcc_assert (EDGE_COUNT (bb->succs) == 0);
3190    }
3191
3192  gsi_remove (&gsi, true);
3193
3194  return ret;
3195}
3196
3197static unsigned
3198execute_lower_resx (void)
3199{
3200  basic_block bb;
3201  struct pointer_map_t *mnt_map;
3202  bool dominance_invalidated = false;
3203  bool any_rewritten = false;
3204
3205  mnt_map = pointer_map_create ();
3206
3207  FOR_EACH_BB (bb)
3208    {
3209      gimple last = last_stmt (bb);
3210      if (last && is_gimple_resx (last))
3211	{
3212	  dominance_invalidated |= lower_resx (bb, last, mnt_map);
3213	  any_rewritten = true;
3214	}
3215    }
3216
3217  pointer_map_destroy (mnt_map);
3218
3219  if (dominance_invalidated)
3220    {
3221      free_dominance_info (CDI_DOMINATORS);
3222      free_dominance_info (CDI_POST_DOMINATORS);
3223    }
3224
3225  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3226}
3227
3228static bool
3229gate_lower_resx (void)
3230{
3231  return flag_exceptions != 0;
3232}
3233
3234struct gimple_opt_pass pass_lower_resx =
3235{
3236 {
3237  GIMPLE_PASS,
3238  "resx",				/* name */
3239  OPTGROUP_NONE,                        /* optinfo_flags */
3240  gate_lower_resx,			/* gate */
3241  execute_lower_resx,			/* execute */
3242  NULL,					/* sub */
3243  NULL,					/* next */
3244  0,					/* static_pass_number */
3245  TV_TREE_EH,				/* tv_id */
3246  PROP_gimple_lcf,			/* properties_required */
3247  0,					/* properties_provided */
3248  0,					/* properties_destroyed */
3249  0,					/* todo_flags_start */
3250  TODO_verify_flow	                /* todo_flags_finish */
3251 }
3252};
3253
3254/* Try to optimize away var = {v} {CLOBBER} stmts followed just by an
3255   external throw, since nothing in this function can observe them.  */
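
/* Illustrative example (hypothetical GIMPLE):

     D.1234 ={v} {CLOBBER};
     resx 1;			<- not caught within this function

   Since the exception leaves the function, nothing here can use D.1234
   afterwards, so the clobber is simply deleted by the loop below.  */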
3256
3257static void
3258optimize_clobbers (basic_block bb)
3259{
3260  gimple_stmt_iterator gsi = gsi_last_bb (bb);
3261  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3262    {
3263      gimple stmt = gsi_stmt (gsi);
3264      if (is_gimple_debug (stmt))
3265	continue;
3266      if (!gimple_clobber_p (stmt)
3267	  || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3268	return;
3269      unlink_stmt_vdef (stmt);
3270      gsi_remove (&gsi, true);
3271      release_defs (stmt);
3272    }
3273}
3274
3275/* Try to sink var = {v} {CLOBBER} stmts followed just by
3276   internal throw to successor BB.  */
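
/* Illustrative example (hypothetical GIMPLE): if a block whose incoming
   edges are all EH edges ends in

     D.1234 ={v} {CLOBBER};
     resx 2;			<- resumes to a landing pad in this function

   the clobber is sunk to the start (after labels) of the single EH
   successor block and queued for virtual-operand renaming, leaving only
   the resx behind.  */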
3277
3278static int
3279sink_clobbers (basic_block bb)
3280{
3281  edge e;
3282  edge_iterator ei;
3283  gimple_stmt_iterator gsi, dgsi;
3284  basic_block succbb;
3285  bool any_clobbers = false;
3286
3287  /* Only optimize if BB has a single EH successor and
3288     all predecessor edges are EH too.  */
3289  if (!single_succ_p (bb)
3290      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3291    return 0;
3292
3293  FOR_EACH_EDGE (e, ei, bb->preds)
3294    {
3295      if ((e->flags & EDGE_EH) == 0)
3296	return 0;
3297    }
3298
3299  /* And BB contains only CLOBBER stmts before the final
3300     RESX.  */
3301  gsi = gsi_last_bb (bb);
3302  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3303    {
3304      gimple stmt = gsi_stmt (gsi);
3305      if (is_gimple_debug (stmt))
3306	continue;
3307      if (gimple_code (stmt) == GIMPLE_LABEL)
3308	break;
3309      if (!gimple_clobber_p (stmt)
3310	  || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3311	return 0;
3312      any_clobbers = true;
3313    }
3314  if (!any_clobbers)
3315    return 0;
3316
3317  succbb = single_succ (bb);
3318  dgsi = gsi_after_labels (succbb);
3319  gsi = gsi_last_bb (bb);
3320  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3321    {
3322      gimple stmt = gsi_stmt (gsi);
3323      if (is_gimple_debug (stmt))
3324	continue;
3325      if (gimple_code (stmt) == GIMPLE_LABEL)
3326	break;
3327      unlink_stmt_vdef (stmt);
3328      gsi_remove (&gsi, false);
3329      /* Trigger the operand scanner to cause renaming for virtual
3330         operands for this statement.
3331	 ???  Given the simple structure of this code manually
3332	 figuring out the reaching definition should not be too hard.  */
3333      if (gimple_vuse (stmt))
3334	gimple_set_vuse (stmt, NULL_TREE);
3335      gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3336    }
3337
3338  return TODO_update_ssa_only_virtuals;
3339}
3340
3341/* At the end of inlining, we can lower EH_DISPATCH.  Return true when
3342   we have found some duplicate labels and removed some edges.  */
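
/* Sketch (hypothetical GIMPLE, assuming this matches the code below): for
   an ERT_TRY region the dispatch is replaced by roughly

     filter = __builtin_eh_filter (REGION_NR);
     switch (filter)
       {
       case F1: goto L1;
       ...
       default: goto DEFAULT_LAB;
       }

   (or just a fallthru edge when only the default case remains), and for an
   ERT_ALLOWED_EXCEPTIONS region by a conditional comparing the filter
   value against the region's allowed filter.  */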
3343
3344static bool
3345lower_eh_dispatch (basic_block src, gimple stmt)
3346{
3347  gimple_stmt_iterator gsi;
3348  int region_nr;
3349  eh_region r;
3350  tree filter, fn;
3351  gimple x;
3352  bool redirected = false;
3353
3354  region_nr = gimple_eh_dispatch_region (stmt);
3355  r = get_eh_region_from_number (region_nr);
3356
3357  gsi = gsi_last_bb (src);
3358
3359  switch (r->type)
3360    {
3361    case ERT_TRY:
3362      {
3363	vec<tree> labels = vNULL;
3364	tree default_label = NULL;
3365	eh_catch c;
3366	edge_iterator ei;
3367	edge e;
3368	struct pointer_set_t *seen_values = pointer_set_create ();
3369
3370	/* Collect the labels for a switch.  Clear each handler's label
3371	   field because we'll no longer have anything keeping these labels
3372	   in existence and the optimizer will be free to merge these
3373	   blocks at will.  */
3374	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3375	  {
3376	    tree tp_node, flt_node, lab = c->label;
3377	    bool have_label = false;
3378
3379	    c->label = NULL;
3380	    tp_node = c->type_list;
3381	    flt_node = c->filter_list;
3382
3383	    if (tp_node == NULL)
3384	      {
3385	        default_label = lab;
3386		break;
3387	      }
3388	    do
3389	      {
3390		/* Filter out duplicate labels that arise when this handler
3391		   is shadowed by an earlier one.  When no labels are
3392		   attached to the handler anymore, we remove
3393		   the corresponding edge and then we delete unreachable
3394		   blocks at the end of this pass.  */
3395		if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3396		  {
3397		    tree t = build_case_label (TREE_VALUE (flt_node),
3398					       NULL, lab);
3399		    labels.safe_push (t);
3400		    pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3401		    have_label = true;
3402		  }
3403
3404		tp_node = TREE_CHAIN (tp_node);
3405		flt_node = TREE_CHAIN (flt_node);
3406	      }
3407	    while (tp_node);
3408	    if (! have_label)
3409	      {
3410	        remove_edge (find_edge (src, label_to_block (lab)));
3411	        redirected = true;
3412	      }
3413	  }
3414
3415	/* Clean up the edge flags.  */
3416	FOR_EACH_EDGE (e, ei, src->succs)
3417	  {
3418	    if (e->flags & EDGE_FALLTHRU)
3419	      {
3420		/* If there was no catch-all, use the fallthru edge.  */
3421		if (default_label == NULL)
3422		  default_label = gimple_block_label (e->dest);
3423		e->flags &= ~EDGE_FALLTHRU;
3424	      }
3425	  }
3426	gcc_assert (default_label != NULL);
3427
3428	/* Don't generate a switch if there's only a default case.
3429	   This is common in the form of try { A; } catch (...) { B; }.  */
3430	if (!labels.exists ())
3431	  {
3432	    e = single_succ_edge (src);
3433	    e->flags |= EDGE_FALLTHRU;
3434	  }
3435	else
3436	  {
3437	    fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3438	    x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3439							 region_nr));
3440	    filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3441	    filter = make_ssa_name (filter, x);
3442	    gimple_call_set_lhs (x, filter);
3443	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3444
3445	    /* Turn the default label into a default case.  */
3446	    default_label = build_case_label (NULL, NULL, default_label);
3447	    sort_case_labels (labels);
3448
3449	    x = gimple_build_switch (filter, default_label, labels);
3450	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3451
3452	    labels.release ();
3453	  }
3454	pointer_set_destroy (seen_values);
3455      }
3456      break;
3457
3458    case ERT_ALLOWED_EXCEPTIONS:
3459      {
3460	edge b_e = BRANCH_EDGE (src);
3461	edge f_e = FALLTHRU_EDGE (src);
3462
3463	fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3464	x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3465						     region_nr));
3466	filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3467	filter = make_ssa_name (filter, x);
3468	gimple_call_set_lhs (x, filter);
3469	gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3470
3471	r->u.allowed.label = NULL;
3472	x = gimple_build_cond (EQ_EXPR, filter,
3473			       build_int_cst (TREE_TYPE (filter),
3474					      r->u.allowed.filter),
3475			       NULL_TREE, NULL_TREE);
3476	gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3477
3478	b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3479        f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3480      }
3481      break;
3482
3483    default:
3484      gcc_unreachable ();
3485    }
3486
3487  /* Replace the EH_DISPATCH with the SWITCH or COND generated above.  */
3488  gsi_remove (&gsi, true);
3489  return redirected;
3490}
3491
3492static unsigned
3493execute_lower_eh_dispatch (void)
3494{
3495  basic_block bb;
3496  int flags = 0;
3497  bool redirected = false;
3498
3499  assign_filter_values ();
3500
3501  FOR_EACH_BB (bb)
3502    {
3503      gimple last = last_stmt (bb);
3504      if (last == NULL)
3505	continue;
3506      if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3507	{
3508	  redirected |= lower_eh_dispatch (bb, last);
3509	  flags |= TODO_update_ssa_only_virtuals;
3510	}
3511      else if (gimple_code (last) == GIMPLE_RESX)
3512	{
3513	  if (stmt_can_throw_external (last))
3514	    optimize_clobbers (bb);
3515	  else
3516	    flags |= sink_clobbers (bb);
3517	}
3518    }
3519
3520  if (redirected)
3521    delete_unreachable_blocks ();
3522  return flags;
3523}
3524
3525static bool
3526gate_lower_eh_dispatch (void)
3527{
3528  return cfun->eh->region_tree != NULL;
3529}
3530
3531struct gimple_opt_pass pass_lower_eh_dispatch =
3532{
3533 {
3534  GIMPLE_PASS,
3535  "ehdisp",				/* name */
3536  OPTGROUP_NONE,                        /* optinfo_flags */
3537  gate_lower_eh_dispatch,		/* gate */
3538  execute_lower_eh_dispatch,		/* execute */
3539  NULL,					/* sub */
3540  NULL,					/* next */
3541  0,					/* static_pass_number */
3542  TV_TREE_EH,				/* tv_id */
3543  PROP_gimple_lcf,			/* properties_required */
3544  0,					/* properties_provided */
3545  0,					/* properties_destroyed */
3546  0,					/* todo_flags_start */
3547  TODO_verify_flow	                /* todo_flags_finish */
3548 }
3549};
3550
3551/* Walk statements, see what regions and, optionally, landing pads
3552   are really referenced.
3553
3554   Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3555   and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
3556
3557   Passing NULL for LP_REACHABLEP is valid, in which case only reachable
3558   regions are marked.
3559
3560   The caller is responsible for freeing the returned sbitmaps.  */
3561
3562static void
3563mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3564{
3565  sbitmap r_reachable, lp_reachable;
3566  basic_block bb;
3567  bool mark_landing_pads = (lp_reachablep != NULL);
3568  gcc_checking_assert (r_reachablep != NULL);
3569
3570  r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3571  bitmap_clear (r_reachable);
3572  *r_reachablep = r_reachable;
3573
3574  if (mark_landing_pads)
3575    {
3576      lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3577      bitmap_clear (lp_reachable);
3578      *lp_reachablep = lp_reachable;
3579    }
3580  else
3581    lp_reachable = NULL;
3582
3583  FOR_EACH_BB (bb)
3584    {
3585      gimple_stmt_iterator gsi;
3586
3587      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3588	{
3589	  gimple stmt = gsi_stmt (gsi);
3590
3591	  if (mark_landing_pads)
3592	    {
3593	      int lp_nr = lookup_stmt_eh_lp (stmt);
3594
3595	      /* Negative LP numbers are MUST_NOT_THROW regions which
3596		 are not considered BB enders.  */
3597	      if (lp_nr < 0)
3598		bitmap_set_bit (r_reachable, -lp_nr);
3599
3600	      /* Positive LP numbers are real landing pads, and BB enders.  */
3601	      else if (lp_nr > 0)
3602		{
3603		  gcc_assert (gsi_one_before_end_p (gsi));
3604		  eh_region region = get_eh_region_from_lp_number (lp_nr);
3605		  bitmap_set_bit (r_reachable, region->index);
3606		  bitmap_set_bit (lp_reachable, lp_nr);
3607		}
3608	    }
3609
3610	  /* Avoid removing regions referenced from RESX/EH_DISPATCH.  */
3611	  switch (gimple_code (stmt))
3612	    {
3613	    case GIMPLE_RESX:
3614	      bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
3615	      break;
3616	    case GIMPLE_EH_DISPATCH:
3617	      bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
3618	      break;
3619	    default:
3620	      break;
3621	    }
3622	}
3623    }
3624}
3625
3626/* Remove unreachable handlers and unreachable landing pads.  */
3627
3628static void
3629remove_unreachable_handlers (void)
3630{
3631  sbitmap r_reachable, lp_reachable;
3632  eh_region region;
3633  eh_landing_pad lp;
3634  unsigned i;
3635
3636  mark_reachable_handlers (&r_reachable, &lp_reachable);
3637
3638  if (dump_file)
3639    {
3640      fprintf (dump_file, "Before removal of unreachable regions:\n");
3641      dump_eh_tree (dump_file, cfun);
3642      fprintf (dump_file, "Reachable regions: ");
3643      dump_bitmap_file (dump_file, r_reachable);
3644      fprintf (dump_file, "Reachable landing pads: ");
3645      dump_bitmap_file (dump_file, lp_reachable);
3646    }
3647
3648  if (dump_file)
3649    {
3650      FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3651	if (region && !bitmap_bit_p (r_reachable, region->index))
3652	  fprintf (dump_file,
3653		   "Removing unreachable region %d\n",
3654		   region->index);
3655    }
3656
3657  remove_unreachable_eh_regions (r_reachable);
3658
3659  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3660    if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3661      {
3662	if (dump_file)
3663	  fprintf (dump_file,
3664		   "Removing unreachable landing pad %d\n",
3665		   lp->index);
3666	remove_eh_landing_pad (lp);
3667      }
3668
3669  if (dump_file)
3670    {
3671      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3672      dump_eh_tree (dump_file, cfun);
3673      fprintf (dump_file, "\n\n");
3674    }
3675
3676  sbitmap_free (r_reachable);
3677  sbitmap_free (lp_reachable);
3678
3679#ifdef ENABLE_CHECKING
3680  verify_eh_tree (cfun);
3681#endif
3682}
3683
3684/* Remove unreachable handlers if any landing pads have been removed after
3685   the last ehcleanup pass (due to gimple_purge_dead_eh_edges).  */
3686
3687void
3688maybe_remove_unreachable_handlers (void)
3689{
3690  eh_landing_pad lp;
3691  unsigned i;
3692
3693  if (cfun->eh == NULL)
3694    return;
3695
3696  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3697    if (lp && lp->post_landing_pad)
3698      {
3699	if (label_to_block (lp->post_landing_pad) == NULL)
3700	  {
3701	    remove_unreachable_handlers ();
3702	    return;
3703	  }
3704      }
3705}
3706
3707/* Remove regions that do not have landing pads.  This assumes
3708   that remove_unreachable_handlers has already been run, and
3709   that we've just manipulated the landing pads since then.
3710
3711   Preserve regions with landing pads and regions that prevent
3712   exceptions from propagating further, even if these regions
3713   are not reachable.  */
3714
3715static void
3716remove_unreachable_handlers_no_lp (void)
3717{
3718  eh_region region;
3719  sbitmap r_reachable;
3720  unsigned i;
3721
3722  mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3723
3724  FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3725    {
3726      if (! region)
3727	continue;
3728
3729      if (region->landing_pads != NULL
3730	  || region->type == ERT_MUST_NOT_THROW)
3731	bitmap_set_bit (r_reachable, region->index);
3732
3733      if (dump_file
3734	  && !bitmap_bit_p (r_reachable, region->index))
3735	fprintf (dump_file,
3736		 "Removing unreachable region %d\n",
3737		 region->index);
3738    }
3739
3740  remove_unreachable_eh_regions (r_reachable);
3741
3742  sbitmap_free (r_reachable);
3743}
3744
3745/* Undo critical edge splitting on an EH landing pad.  Earlier, we
3746   optimistically split all sorts of edges, including EH edges.  The
3747   optimization passes in between may not have needed them; if not,
3748   we should undo the split.
3749
3750   Recognize this case by having one EH edge incoming to the BB and
3751   one normal edge outgoing; BB should be empty apart from the
3752   post_landing_pad label.
3753
3754   Note that this is slightly different from the empty handler case
3755   handled by cleanup_empty_eh, in that the actual handler may yet
3756   have actual code but the landing pad has been separated from the
3757   handler.  As such, cleanup_empty_eh relies on this transformation
3758   having been done first.  */
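
/* For illustration (a hypothetical sketch, not taken from any test
   case): before unsplitting we have

	<bb throw>  --EH-->  <bb pad>:  (only the post_landing_pad label)
				|
				| fallthru
				v
			    <bb handler>:  actual handler code

   and afterwards the EH edge from <bb throw> targets <bb handler>
   directly, which then becomes the landing pad itself.  */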
3759
3760static bool
3761unsplit_eh (eh_landing_pad lp)
3762{
3763  basic_block bb = label_to_block (lp->post_landing_pad);
3764  gimple_stmt_iterator gsi;
3765  edge e_in, e_out;
3766
3767  /* Quickly check the edge counts on BB for singularity.  */
3768  if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3769    return false;
3770  e_in = EDGE_PRED (bb, 0);
3771  e_out = EDGE_SUCC (bb, 0);
3772
3773  /* Input edge must be EH and output edge must be normal.  */
3774  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3775    return false;
3776
3777  /* The block must be empty except for the labels and debug insns.  */
3778  gsi = gsi_after_labels (bb);
3779  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3780    gsi_next_nondebug (&gsi);
3781  if (!gsi_end_p (gsi))
3782    return false;
3783
3784  /* The destination block must not already have a landing pad
3785     for a different region.  */
3786  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3787    {
3788      gimple stmt = gsi_stmt (gsi);
3789      tree lab;
3790      int lp_nr;
3791
3792      if (gimple_code (stmt) != GIMPLE_LABEL)
3793	break;
3794      lab = gimple_label_label (stmt);
3795      lp_nr = EH_LANDING_PAD_NR (lab);
3796      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3797	return false;
3798    }
3799
3800  /* The new destination block must not already be a destination of
3801     the source block, lest we merge fallthru and EH edges and get
3802     all sorts of confusion.  */
3803  if (find_edge (e_in->src, e_out->dest))
3804    return false;
3805
3806  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
3807     thought this should have been cleaned up by a phicprop pass, but
3808     that doesn't appear to handle virtuals.  Propagate by hand.  */
3809  if (!gimple_seq_empty_p (phi_nodes (bb)))
3810    {
3811      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3812	{
3813	  gimple use_stmt, phi = gsi_stmt (gsi);
3814	  tree lhs = gimple_phi_result (phi);
3815	  tree rhs = gimple_phi_arg_def (phi, 0);
3816	  use_operand_p use_p;
3817	  imm_use_iterator iter;
3818
3819	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3820	    {
3821	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3822		SET_USE (use_p, rhs);
3823	    }
3824
3825	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3826	    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3827
3828	  remove_phi_node (&gsi, true);
3829	}
3830    }
3831
3832  if (dump_file && (dump_flags & TDF_DETAILS))
3833    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3834	     lp->index, e_out->dest->index);
3835
3836  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
3837     a successor edge, humor it.  But do the real CFG change with the
3838     predecessor of E_OUT in order to preserve the ordering of arguments
3839     to the PHI nodes in E_OUT->DEST.  */
3840  redirect_eh_edge_1 (e_in, e_out->dest, false);
3841  redirect_edge_pred (e_out, e_in->src);
3842  e_out->flags = e_in->flags;
3843  e_out->probability = e_in->probability;
3844  e_out->count = e_in->count;
3845  remove_edge (e_in);
3846
3847  return true;
3848}
3849
3850/* Examine each landing pad block and see if it matches unsplit_eh.  */
3851
3852static bool
3853unsplit_all_eh (void)
3854{
3855  bool changed = false;
3856  eh_landing_pad lp;
3857  int i;
3858
3859  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
3860    if (lp)
3861      changed |= unsplit_eh (lp);
3862
3863  return changed;
3864}
3865
3866/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
3867   to OLD_BB to NEW_BB; return true on success, false on failure.
3868
3869   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3870   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3871   Virtual PHIs may be deleted and marked for renaming.  */
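
/* A hypothetical sketch of the PHI handling (illustrative only):
   suppose NEW_BB contains

	x_3 = PHI <x_1 (OLD_BB), x_2 (other pred)>

   and OLD_BB contains

	x_1 = PHI <x_4 (EH pred A), x_5 (EH pred B)>.

   After the merge, the EH edges from A and B are redirected to NEW_BB
   and supply x_4 and x_5 directly as arguments of x_3's PHI, so the
   intermediate x_1 is no longer needed on those paths.  */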
3872
3873static bool
3874cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3875			     edge old_bb_out, bool change_region)
3876{
3877  gimple_stmt_iterator ngsi, ogsi;
3878  edge_iterator ei;
3879  edge e;
3880  bitmap rename_virts;
3881  bitmap ophi_handled;
3882
3883  /* The destination block must not be a regular successor for any
3884     of the preds of the landing pad.  Thus, avoid turning
3885        <..>
3886	 |  \ EH
3887	 |  <..>
3888	 |  /
3889	<..>
3890     into
3891        <..>
3892	|  | EH
3893	<..>
3894     which CFG verification would choke on.  See PR45172 and PR51089.  */
3895  FOR_EACH_EDGE (e, ei, old_bb->preds)
3896    if (find_edge (e->src, new_bb))
3897      return false;
3898
3899  FOR_EACH_EDGE (e, ei, old_bb->preds)
3900    redirect_edge_var_map_clear (e);
3901
3902  ophi_handled = BITMAP_ALLOC (NULL);
3903  rename_virts = BITMAP_ALLOC (NULL);
3904
3905  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3906     for the edges we're going to move.  */
3907  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3908    {
3909      gimple ophi, nphi = gsi_stmt (ngsi);
3910      tree nresult, nop;
3911
3912      nresult = gimple_phi_result (nphi);
3913      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3914
3915      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3916	 the source ssa_name.  */
3917      ophi = NULL;
3918      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3919	{
3920	  ophi = gsi_stmt (ogsi);
3921	  if (gimple_phi_result (ophi) == nop)
3922	    break;
3923	  ophi = NULL;
3924	}
3925
3926      /* If we did find the corresponding PHI, copy those inputs.  */
3927      if (ophi)
3928	{
3929	  /* If NOP is used somewhere else beyond PHIs in NEW_BB, give up.  */
3930	  if (!has_single_use (nop))
3931	    {
3932	      imm_use_iterator imm_iter;
3933	      use_operand_p use_p;
3934
3935	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3936		{
3937		  if (!gimple_debug_bind_p (USE_STMT (use_p))
3938		      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3939			  || gimple_bb (USE_STMT (use_p)) != new_bb))
3940		    goto fail;
3941		}
3942	    }
3943	  bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3944	  FOR_EACH_EDGE (e, ei, old_bb->preds)
3945	    {
3946	      location_t oloc;
3947	      tree oop;
3948
3949	      if ((e->flags & EDGE_EH) == 0)
3950		continue;
3951	      oop = gimple_phi_arg_def (ophi, e->dest_idx);
3952	      oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3953	      redirect_edge_var_map_add (e, nresult, oop, oloc);
3954	    }
3955	}
3956      /* If we didn't find the PHI, but it's a VOP, remember to rename
3957	 it later, assuming all other tests succeed.  */
3958      else if (virtual_operand_p (nresult))
3959	bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3960      /* If we didn't find the PHI, and it's a real variable, we know
3961	 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3962	 variable is unchanged from input to the block and we can simply
3963	 re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
3964      else
3965	{
3966	  location_t nloc
3967	    = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3968	  FOR_EACH_EDGE (e, ei, old_bb->preds)
3969	    redirect_edge_var_map_add (e, nresult, nop, nloc);
3970	}
3971    }
3972
3973  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
3974     we don't know what values from the other edges into NEW_BB to use.  */
3975  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3976    {
3977      gimple ophi = gsi_stmt (ogsi);
3978      tree oresult = gimple_phi_result (ophi);
3979      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3980	goto fail;
3981    }
3982
3983  /* At this point we know that the merge will succeed.  Remove the PHI
3984     nodes for the virtuals that we want to rename.  */
3985  if (!bitmap_empty_p (rename_virts))
3986    {
3987      for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3988	{
3989	  gimple nphi = gsi_stmt (ngsi);
3990	  tree nresult = gimple_phi_result (nphi);
3991	  if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3992	    {
3993	      mark_virtual_phi_result_for_renaming (nphi);
3994	      remove_phi_node (&ngsi, true);
3995	    }
3996	  else
3997	    gsi_next (&ngsi);
3998	}
3999    }
4000
4001  /* Finally, move the edges and update the PHIs.  */
4002  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4003    if (e->flags & EDGE_EH)
4004      {
4005	/* ???  CFG manipulation routines do not try to update loop
4006	   form on edge redirection.  Do so manually here for now.  */
4007	/* If we redirect a loop entry or latch edge, that will either create
4008	   a multiple-entry loop or rotate the loop.  If the loops merge
4009	   we may have created a loop with multiple latches.
4010	   All of this isn't easily fixed, thus cancel the affected loop
4011	   and mark the other loop as possibly having multiple latches.  */
4012	if (current_loops
4013	    && e->dest == e->dest->loop_father->header)
4014	  {
4015	    e->dest->loop_father->header = NULL;
4016	    e->dest->loop_father->latch = NULL;
4017	    new_bb->loop_father->latch = NULL;
4018	    loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4019	  }
4020	redirect_eh_edge_1 (e, new_bb, change_region);
4021	redirect_edge_succ (e, new_bb);
4022	flush_pending_stmts (e);
4023      }
4024    else
4025      ei_next (&ei);
4026
4027  BITMAP_FREE (ophi_handled);
4028  BITMAP_FREE (rename_virts);
4029  return true;
4030
4031 fail:
4032  FOR_EACH_EDGE (e, ei, old_bb->preds)
4033    redirect_edge_var_map_clear (e);
4034  BITMAP_FREE (ophi_handled);
4035  BITMAP_FREE (rename_virts);
4036  return false;
4037}
4038
4039/* A subroutine of cleanup_empty_eh.  Move a landing pad LP from its
4040   old region to NEW_REGION at BB.  */
4041
4042static void
4043cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4044			  eh_landing_pad lp, eh_region new_region)
4045{
4046  gimple_stmt_iterator gsi;
4047  eh_landing_pad *pp;
4048
4049  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4050    continue;
4051  *pp = lp->next_lp;
4052
4053  lp->region = new_region;
4054  lp->next_lp = new_region->landing_pads;
4055  new_region->landing_pads = lp;
4056
4057  /* Delete the RESX that was matched within the empty handler block.  */
4058  gsi = gsi_last_bb (bb);
4059  unlink_stmt_vdef (gsi_stmt (gsi));
4060  gsi_remove (&gsi, true);
4061
4062  /* Clean up E_OUT for the fallthru.  */
4063  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4064  e_out->probability = REG_BR_PROB_BASE;
4065}
4066
4067/* A subroutine of cleanup_empty_eh.  Handle more complex cases of
4068   unsplitting than unsplit_eh was prepared to handle, e.g. when
4069   multiple incoming edges and phis are involved.  */
4070
4071static bool
4072cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4073{
4074  gimple_stmt_iterator gsi;
4075  tree lab;
4076
4077  /* We really ought not have totally lost everything following
4078     a landing pad label.  Given that BB is empty, there had better
4079     be a successor.  */
4080  gcc_assert (e_out != NULL);
4081
4082  /* The destination block must not already have a landing pad
4083     for a different region.  */
4084  lab = NULL;
4085  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4086    {
4087      gimple stmt = gsi_stmt (gsi);
4088      int lp_nr;
4089
4090      if (gimple_code (stmt) != GIMPLE_LABEL)
4091	break;
4092      lab = gimple_label_label (stmt);
4093      lp_nr = EH_LANDING_PAD_NR (lab);
4094      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4095	return false;
4096    }
4097
4098  /* Attempt to move the PHIs into the successor block.  */
4099  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4100    {
4101      if (dump_file && (dump_flags & TDF_DETAILS))
4102	fprintf (dump_file,
4103		 "Unsplit EH landing pad %d to block %i "
4104		 "(via cleanup_empty_eh).\n",
4105		 lp->index, e_out->dest->index);
4106      return true;
4107    }
4108
4109  return false;
4110}
4111
4112/* Return true if edge E_FIRST is part of an empty infinite loop
4113   or leads to such a loop through a series of single successor
4114   empty bbs.  */
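
/* For example (purely illustrative), if E_FIRST leads into a chain of
   empty single-successor blocks that ends in an empty block looping
   back to itself,

	E_FIRST -> <bb A> (empty) -> <bb B> (empty, sole successor <bb B>)

   this predicate returns true.  */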
4115
4116static bool
4117infinite_empty_loop_p (edge e_first)
4118{
4119  bool inf_loop = false;
4120  edge e;
4121
4122  if (e_first->dest == e_first->src)
4123    return true;
4124
4125  e_first->src->aux = (void *) 1;
4126  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4127    {
4128      gimple_stmt_iterator gsi;
4129      if (e->dest->aux)
4130	{
4131	  inf_loop = true;
4132	  break;
4133	}
4134      e->dest->aux = (void *) 1;
4135      gsi = gsi_after_labels (e->dest);
4136      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4137	gsi_next_nondebug (&gsi);
4138      if (!gsi_end_p (gsi))
4139	break;
4140    }
4141  e_first->src->aux = NULL;
4142  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4143    e->dest->aux = NULL;
4144
4145  return inf_loop;
4146}
4147
4148/* Examine the block associated with LP to determine if it's an empty
4149   handler for its EH region.  If so, attempt to redirect EH edges to
4150   an outer region.  Return true if the CFG was updated in any way.  This
4151   is similar to jump forwarding, just across EH edges.  */
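
/* As a hedged source-level illustration (hypothetical, not from the
   testsuite): after a trivial destructor has been inlined away, a C++
   cleanup such as

	{ Trivial t; might_throw (); }

   can leave a handler that, once its clobbers are sunk or removed,
   consists of nothing but a resx.  Such a handler is forwarded to the
   enclosing region, or its EH edges are simply removed when the
   exception would propagate out of the function.  */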
4152
4153static bool
4154cleanup_empty_eh (eh_landing_pad lp)
4155{
4156  basic_block bb = label_to_block (lp->post_landing_pad);
4157  gimple_stmt_iterator gsi;
4158  gimple resx;
4159  eh_region new_region;
4160  edge_iterator ei;
4161  edge e, e_out;
4162  bool has_non_eh_pred;
4163  bool ret = false;
4164  int new_lp_nr;
4165
4166  /* There can be zero or one edges out of BB.  This is the quickest test.  */
4167  switch (EDGE_COUNT (bb->succs))
4168    {
4169    case 0:
4170      e_out = NULL;
4171      break;
4172    case 1:
4173      e_out = EDGE_SUCC (bb, 0);
4174      break;
4175    default:
4176      return false;
4177    }
4178
4179  resx = last_stmt (bb);
4180  if (resx && is_gimple_resx (resx))
4181    {
4182      if (stmt_can_throw_external (resx))
4183	optimize_clobbers (bb);
4184      else if (sink_clobbers (bb))
4185	ret = true;
4186    }
4187
4188  gsi = gsi_after_labels (bb);
4189
4190  /* Make sure to skip debug statements.  */
4191  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4192    gsi_next_nondebug (&gsi);
4193
4194  /* If the block is totally empty, look for more unsplitting cases.  */
4195  if (gsi_end_p (gsi))
4196    {
4197      /* For the degenerate case of an infinite loop, bail out.
4198	 If BB has no successors and is totally empty, which can happen e.g.
4199	 because of an incorrect noreturn attribute, bail out too.  */
4200      if (e_out == NULL
4201	  || infinite_empty_loop_p (e_out))
4202	return ret;
4203
4204      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4205    }
4206
4207  /* The block should consist only of a single RESX statement, modulo a
4208     preceding call to __builtin_stack_restore if there is no outgoing
4209     edge, since the call can be eliminated in this case.  */
4210  resx = gsi_stmt (gsi);
4211  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4212    {
4213      gsi_next (&gsi);
4214      resx = gsi_stmt (gsi);
4215    }
4216  if (!is_gimple_resx (resx))
4217    return ret;
4218  gcc_assert (gsi_one_before_end_p (gsi));
4219
4220  /* Determine if there are non-EH edges, or resx edges into the handler.  */
4221  has_non_eh_pred = false;
4222  FOR_EACH_EDGE (e, ei, bb->preds)
4223    if (!(e->flags & EDGE_EH))
4224      has_non_eh_pred = true;
4225
4226  /* Find the handler that's outside of the empty handler by looking at
4227     where the RESX instruction was vectored.  */
4228  new_lp_nr = lookup_stmt_eh_lp (resx);
4229  new_region = get_eh_region_from_lp_number (new_lp_nr);
4230
4231  /* If there's no destination region within the current function,
4232     redirection is trivial via removing the throwing statements from
4233     the EH region, removing the EH edges, and allowing the block
4234     to go unreachable.  */
4235  if (new_region == NULL)
4236    {
4237      gcc_assert (e_out == NULL);
4238      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4239	if (e->flags & EDGE_EH)
4240	  {
4241	    gimple stmt = last_stmt (e->src);
4242	    remove_stmt_from_eh_lp (stmt);
4243	    remove_edge (e);
4244	  }
4245	else
4246	  ei_next (&ei);
4247      goto succeed;
4248    }
4249
4250  /* If the destination region is a MUST_NOT_THROW, allow the runtime
4251     to handle the abort and allow the blocks to go unreachable.  */
4252  if (new_region->type == ERT_MUST_NOT_THROW)
4253    {
4254      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4255	if (e->flags & EDGE_EH)
4256	  {
4257	    gimple stmt = last_stmt (e->src);
4258	    remove_stmt_from_eh_lp (stmt);
4259	    add_stmt_to_eh_lp (stmt, new_lp_nr);
4260	    remove_edge (e);
4261	  }
4262	else
4263	  ei_next (&ei);
4264      goto succeed;
4265    }
4266
4267  /* Try to redirect the EH edges and merge the PHIs into the destination
4268     landing pad block.  If the merge succeeds, we'll already have redirected
4269     all the EH edges.  The handler itself will go unreachable if there were
4270     no normal edges.  */
4271  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4272    goto succeed;
4273
4274  /* Finally, if all input edges are EH edges, then we can (potentially)
4275     reduce the number of transfers from the runtime by moving the landing
4276     pad from the original region to the new region.  This is a win when
4277     we remove the last CLEANUP region along a particular exception
4278     propagation path.  Since nothing changes except for the region with
4279     which the landing pad is associated, the PHI nodes do not need to be
4280     adjusted at all.  */
4281  if (!has_non_eh_pred)
4282    {
4283      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4284      if (dump_file && (dump_flags & TDF_DETAILS))
4285	fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4286		 lp->index, new_region->index);
4287
4288      /* ??? The CFG didn't change, but we may have rendered the
4289	 old EH region unreachable.  Trigger a cleanup there.  */
4290      return true;
4291    }
4292
4293  return ret;
4294
4295 succeed:
4296  if (dump_file && (dump_flags & TDF_DETAILS))
4297    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4298  remove_eh_landing_pad (lp);
4299  return true;
4300}
4301
4302/* Do a post-order traversal of the EH region tree.  Examine each
4303   post_landing_pad block and see if we can eliminate it as empty.  */
4304
4305static bool
4306cleanup_all_empty_eh (void)
4307{
4308  bool changed = false;
4309  eh_landing_pad lp;
4310  int i;
4311
4312  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4313    if (lp)
4314      changed |= cleanup_empty_eh (lp);
4315
4316  return changed;
4317}
4318
4319/* Perform cleanups and lowering of exception handling:
4320    1) Cleanup regions with handlers that do nothing are optimized out.
4321    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out.
4322    3) Info about regions containing instructions, and regions
4323       reachable via local EH edges, is collected.
4324    4) The EH tree is pruned of regions that are no longer necessary.
4325
4326   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4327	 Unify those that have the same failure decl and locus.
4328*/
4329
4330static unsigned int
4331execute_cleanup_eh_1 (void)
4332{
4333  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4334     looking up unreachable landing pads.  */
4335  remove_unreachable_handlers ();
4336
4337  /* Watch out for the region tree vanishing due to all regions being unreachable.  */
4338  if (cfun->eh->region_tree)
4339    {
4340      bool changed = false;
4341
4342      if (optimize)
4343	changed |= unsplit_all_eh ();
4344      changed |= cleanup_all_empty_eh ();
4345
4346      if (changed)
4347	{
4348	  free_dominance_info (CDI_DOMINATORS);
4349	  free_dominance_info (CDI_POST_DOMINATORS);
4350
4351          /* We delayed all basic block deletion, as we may have performed
4352	     cleanups on EH edges while non-EH edges were still present.  */
4353	  delete_unreachable_blocks ();
4354
4355	  /* We manipulated the landing pads.  Remove any region that no
4356	     longer has a landing pad.  */
4357	  remove_unreachable_handlers_no_lp ();
4358
4359	  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4360	}
4361    }
4362
4363  return 0;
4364}
4365
4366static unsigned int
4367execute_cleanup_eh (void)
4368{
4369  int ret = execute_cleanup_eh_1 ();
4370
4371  /* If the function no longer needs an EH personality routine
4372     clear it.  This exposes cross-language inlining opportunities
4373     and avoids references to a never defined personality routine.  */
4374  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4375      && function_needs_eh_personality (cfun) != eh_personality_lang)
4376    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4377
4378  return ret;
4379}
4380
4381static bool
4382gate_cleanup_eh (void)
4383{
4384  return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4385}
4386
4387struct gimple_opt_pass pass_cleanup_eh = {
4388  {
4389   GIMPLE_PASS,
4390   "ehcleanup",			/* name */
4391   OPTGROUP_NONE,               /* optinfo_flags */
4392   gate_cleanup_eh,		/* gate */
4393   execute_cleanup_eh,		/* execute */
4394   NULL,			/* sub */
4395   NULL,			/* next */
4396   0,				/* static_pass_number */
4397   TV_TREE_EH,			/* tv_id */
4398   PROP_gimple_lcf,		/* properties_required */
4399   0,				/* properties_provided */
4400   0,				/* properties_destroyed */
4401   0,				/* todo_flags_start */
4402   0             		/* todo_flags_finish */
4403   }
4404};
4405
4406/* Verify that BB, which contains STMT as its last statement, has precisely
4407   the edge that make_eh_edges would create.  */
4408
4409DEBUG_FUNCTION bool
4410verify_eh_edges (gimple stmt)
4411{
4412  basic_block bb = gimple_bb (stmt);
4413  eh_landing_pad lp = NULL;
4414  int lp_nr;
4415  edge_iterator ei;
4416  edge e, eh_edge;
4417
4418  lp_nr = lookup_stmt_eh_lp (stmt);
4419  if (lp_nr > 0)
4420    lp = get_eh_landing_pad_from_number (lp_nr);
4421
4422  eh_edge = NULL;
4423  FOR_EACH_EDGE (e, ei, bb->succs)
4424    {
4425      if (e->flags & EDGE_EH)
4426	{
4427	  if (eh_edge)
4428	    {
4429	      error ("BB %i has multiple EH edges", bb->index);
4430	      return true;
4431	    }
4432	  else
4433	    eh_edge = e;
4434	}
4435    }
4436
4437  if (lp == NULL)
4438    {
4439      if (eh_edge)
4440	{
4441	  error ("BB %i can not throw but has an EH edge", bb->index);
4442	  return true;
4443	}
4444      return false;
4445    }
4446
4447  if (!stmt_could_throw_p (stmt))
4448    {
4449      error ("BB %i last statement has incorrectly set lp", bb->index);
4450      return true;
4451    }
4452
4453  if (eh_edge == NULL)
4454    {
4455      error ("BB %i is missing an EH edge", bb->index);
4456      return true;
4457    }
4458
4459  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4460    {
4461      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4462      return true;
4463    }
4464
4465  return false;
4466}
4467
4468/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */
4469
4470DEBUG_FUNCTION bool
4471verify_eh_dispatch_edge (gimple stmt)
4472{
4473  eh_region r;
4474  eh_catch c;
4475  basic_block src, dst;
4476  bool want_fallthru = true;
4477  edge_iterator ei;
4478  edge e, fall_edge;
4479
4480  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4481  src = gimple_bb (stmt);
4482
4483  FOR_EACH_EDGE (e, ei, src->succs)
4484    gcc_assert (e->aux == NULL);
4485
4486  switch (r->type)
4487    {
4488    case ERT_TRY:
4489      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4490	{
4491	  dst = label_to_block (c->label);
4492	  e = find_edge (src, dst);
4493	  if (e == NULL)
4494	    {
4495	      error ("BB %i is missing an edge", src->index);
4496	      return true;
4497	    }
4498	  e->aux = (void *)e;
4499
4500	  /* A catch-all handler doesn't have a fallthru.  */
4501	  if (c->type_list == NULL)
4502	    {
4503	      want_fallthru = false;
4504	      break;
4505	    }
4506	}
4507      break;
4508
4509    case ERT_ALLOWED_EXCEPTIONS:
4510      dst = label_to_block (r->u.allowed.label);
4511      e = find_edge (src, dst);
4512      if (e == NULL)
4513	{
4514	  error ("BB %i is missing an edge", src->index);
4515	  return true;
4516	}
4517      e->aux = (void *)e;
4518      break;
4519
4520    default:
4521      gcc_unreachable ();
4522    }
4523
4524  fall_edge = NULL;
4525  FOR_EACH_EDGE (e, ei, src->succs)
4526    {
4527      if (e->flags & EDGE_FALLTHRU)
4528	{
4529	  if (fall_edge != NULL)
4530	    {
4531	      error ("BB %i too many fallthru edges", src->index);
4532	      return true;
4533	    }
4534	  fall_edge = e;
4535	}
4536      else if (e->aux)
4537	e->aux = NULL;
4538      else
4539	{
4540	  error ("BB %i has incorrect edge", src->index);
4541	  return true;
4542	}
4543    }
4544  if ((fall_edge != NULL) ^ want_fallthru)
4545    {
4546      error ("BB %i has incorrect fallthru edge", src->index);
4547      return true;
4548    }
4549
4550  return false;
4551}
4552