/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"


/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
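  /* Discard the low-order bits of the pointer, which are typically
     zero due to alignment and would otherwise bunch the hash values
     together.  */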
  return (size_t)*x >> 4;
}


/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
  /* ??? For the benefit of calls.c, converting all this to rtl,
     we need to record the call expression, not just the outer
     modify statement.  */
  if (TREE_CODE (t) == MODIFY_EXPR
      && (t = get_call_expr_in (t)))
    add_stmt_to_eh_region_fn (ifun, t, num);
}

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

bool
remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                        NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      /* ??? For the benefit of calls.c, converting all this to rtl,
	 we need to record the call expression, not just the outer
	 modify statement.  */
      if (TREE_CODE (t) == MODIFY_EXPR
	  && (t = get_call_expr_in (t)))
	remove_stmt_from_eh_region_fn (ifun, t);
      return true;
    }
  else
    return false;
}

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

int
lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
                                            &n);

  return (p ? p->region_nr : -1);
}

int
lookup_stmt_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}


/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
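
/* As an illustration (commentary only, not part of the lowering):
   given

	try {
	  L1: body1;
	  goto L2;
	} finally {
	  cleanup;
	}
	L2: body2;

   collect_finally_tree records L1 with the TRY_FINALLY_EXPR as its
   parent, and the TRY_FINALLY_EXPR itself with its enclosing region
   (if any).  L2 gets no entry relative to this node, so
   outside_finally_tree (L2, <the TRY_FINALLY_EXPR>) returns true and
   the goto is later queued for redirection through the finally
   block.  */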

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
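
/* By way of illustration only (the exact output depends on which of
   the lower_try_finally_* strategies below is chosen), the
   duplication strategy turns

	try { body; } finally { cleanup; }

   with a fallthru edge and a possible EH edge into roughly

	body;
	cleanup;			<-- fallthru copy
	goto fallthru_label;
      eh_label:
	cleanup;			<-- EH copy
	resx;
      fallthru_label:

   while the switch strategy instead sets a temporary on each incoming
   edge and dispatches on it after a single copy of the cleanup.  */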

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = (struct goto_queue_node *)
     bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
         = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, none of
   the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	  should return 0, not 1.  Arrange for this to happen by copying
	  the computed return value into a local temporary.  This also
	  allows us to redirect multiple return statements through the
	  same destination block; whether this is a net win or not really
	  depends, I guess, but it does make generation of the switch in
	  lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case MODIFY_EXPR:
	  {
	    tree result = TREE_OPERAND (ret_expr, 0);
	    tree new, old = TREE_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				      TREE_TYPE (current_function_decl)))
		  /* If this function returns in memory, copy the argument
		    into the return slot now.  Otherwise, we might need to
		    worry about magic return semantics, so we need to use a
		    temporary to hold the value until we're actually ready
		    to return.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    if (new == result)
	      x = result;
	    else
	      x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
	    q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* The new statements must be linked in only after running
     replace_goto_queue, so that we do not process the same goto
     stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
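
/* Schematically (label and temporary names are invented for
   illustration), with one goto destination plus a fallthru the
   result looks like

	finally_tmp = 1; goto finally_label;	<-- on the fallthru path
	finally_tmp = 0; goto finally_label;	<-- each escaping goto
      finally_label:
	cleanup;
	switch (finally_tmp)
	  {
	  case 0: goto original_destination;
	  default: goto fallthru_label;
	  }
 */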

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
		        NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence in which
     the code is laid out.  */

  if (tf->may_fallthru)
    {
      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}


      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, fallthru_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, eh_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		        build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
		        build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build3 (CASE_LABEL_EXPR, void_type_node,
		    build_int_cst (NULL_TREE, switch_id), NULL,
		    /* We store the cont_stmt in the
		       CASE_LABEL, so that we can recover it
		       in the loop below.  We don't create
		       the new label while walking the
		       goto_queue because pointers don't
		       offer a stable order.  */
		    q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* The switch statement must be linked in only after running
     replace_goto_queue, so that we do not process the same goto
     stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
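
/* For instance, assuming estimate_num_insns were to return 12 for the
   finally block and there were 3 destinations, copying would cost
   (12 + 1) * 3 = 39 while the switch machinery would cost
   10 + 2 * 3 = 16, so when optimizing for size we would keep a single
   copy of the finally block and emit the switch.  */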

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
1342169689Skan   that record all the magic.  This is complicated by the need to
1343169689Skan   arrange for the FINALLY block to be executed on all exits.  */
1344169689Skan
1345169689Skanstatic void
1346169689Skanlower_try_finally (struct leh_state *state, tree *tp)
1347169689Skan{
1348169689Skan  struct leh_tf_state this_tf;
1349169689Skan  struct leh_state this_state;
1350169689Skan  int ndests;
1351169689Skan
1352169689Skan  /* Process the try block.  */
1353169689Skan
1354169689Skan  memset (&this_tf, 0, sizeof (this_tf));
1355169689Skan  this_tf.try_finally_expr = *tp;
1356169689Skan  this_tf.top_p = tp;
1357169689Skan  this_tf.outer = state;
1358169689Skan  if (using_eh_for_cleanups_p)
1359169689Skan    this_tf.region
1360169689Skan      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1361169689Skan  else
1362169689Skan    this_tf.region = NULL;
1363169689Skan
1364169689Skan  this_state.cur_region = this_tf.region;
1365169689Skan  this_state.prev_try = state->prev_try;
1366169689Skan  this_state.tf = &this_tf;
1367169689Skan
1368169689Skan  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1369169689Skan
1370169689Skan  /* Determine if the try block is escaped through the bottom.  */
1371169689Skan  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1372169689Skan
1373169689Skan  /* Determine if any exceptions are possible within the try block.  */
1374169689Skan  if (using_eh_for_cleanups_p)
1375169689Skan    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1376169689Skan  if (this_tf.may_throw)
1377169689Skan    {
1378169689Skan      this_tf.eh_label = create_artificial_label ();
1379169689Skan      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1380169689Skan      honor_protect_cleanup_actions (state, &this_state, &this_tf);
1381169689Skan    }
1382169689Skan
1383169689Skan  /* Sort the goto queue for efficient searching later.  */
1384169689Skan  if (this_tf.goto_queue_active > 1)
1385169689Skan    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1386169689Skan	   sizeof (struct goto_queue_node), goto_queue_cmp);
1387169689Skan
1388169689Skan  /* Determine how many edges (still) reach the finally block.  Or rather,
1389169689Skan     how many destinations are reached by the finally block.  Use this to
1390169689Skan     determine how we process the finally block itself.  */
1391169689Skan
1392169689Skan  ndests = VEC_length (tree, this_tf.dest_array);
1393169689Skan  ndests += this_tf.may_fallthru;
1394169689Skan  ndests += this_tf.may_return;
1395169689Skan  ndests += this_tf.may_throw;
1396169689Skan
1397169689Skan  /* If the FINALLY block is not reachable, dike it out.  */
1398169689Skan  if (ndests == 0)
1399169689Skan    *tp = TREE_OPERAND (*tp, 0);
1400169689Skan
1401169689Skan  /* If the finally block doesn't fall through, then any destination
1402169689Skan     we might try to impose there isn't reached either.  There may be
1403169689Skan     some minor amount of cleanup and redirection still needed.  */
1404169689Skan  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1405169689Skan    lower_try_finally_nofallthru (state, &this_tf);
1406169689Skan
1407169689Skan  /* We can easily special-case redirection to a single destination.  */
1408169689Skan  else if (ndests == 1)
1409169689Skan    lower_try_finally_onedest (state, &this_tf);
1410169689Skan
1411169689Skan  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1412169689Skan    lower_try_finally_copy (state, &this_tf);
1413169689Skan  else
1414169689Skan    lower_try_finally_switch (state, &this_tf);
1415169689Skan
1416169689Skan  /* If someone requested we add a label at the end of the transformed
1417169689Skan     block, do so.  */
1418169689Skan  if (this_tf.fallthru_label)
1419169689Skan    {
1420169689Skan      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1421169689Skan      append_to_statement_list (x, tp);
1422169689Skan    }
1423169689Skan
1424169689Skan  VEC_free (tree, heap, this_tf.dest_array);
1425169689Skan  if (this_tf.goto_queue)
1426169689Skan    free (this_tf.goto_queue);
1427169689Skan}
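
/* Illustrative sketch of what the dispatch above produces; the GIMPLE is
   simplified and the label and temporary names are invented.  For source
   like

     try { body (); } finally { cleanup (); }

   reached both by falling through and by an exception edge, the
   switch-based lowering conceptually yields

       body ();
       finally_tmp = 0;
       goto finally;
     eh_label:
       finally_tmp = 1;
     finally:
       cleanup ();
       switch (finally_tmp)
         {
         case 0: goto fallthru_label;
         case 1: resx;
         }
     fallthru_label:

   while the copy-based lowering instead emits one copy of cleanup () on
   each of the ndests outgoing paths.  */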

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
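
/* Illustrative sketch, with invented labels: a two-handler construct such
   as

     try { body (); }
     catch (A) { handle_a (); }
     catch (B) { handle_b (); }

   is rearranged into roughly

       body ();
       goto out;
     eh_label_a:
       handle_a ();
       goto out;
     eh_label_b:
       handle_b ();
     out:

   with eh_label_a and eh_label_b recorded as the tree labels of the two
   catch regions so that EH edges can be constructed later.  */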

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For a must-not-throw region, any cleanup regions inside it
     cannot reach outer catch regions.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
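
/* Illustrative sketch, with invented labels: an exception specification
   such as "void f () throw (A)" wraps the function body in an
   EH_FILTER_EXPR, and the lowering above turns it into approximately

       body ();
       goto fallthru;
     eh_label:
       failure_action ();   e.g. the unexpected-handler call
     fallthru:

   where eh_label is the label of the allowed-exceptions (or
   must-not-throw) region just created, and failure_action is the lowered
   EH_FILTER_FAILURE expression.  */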

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
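
/* Illustrative sketch, with invented labels: an exception-only cleanup
   such as a C++ destructor that must run only when an exception escapes
   the body conceptually becomes

       body ();
       goto fallthru;
     eh_label:
       cleanup ();
       resx;
     fallthru:

   unless honor_protect_cleanup_actions already hoisted the cleanup into a
   surrounding must-not-throw region, in which case only the fallthru
   label (if any) remains to be appended.  */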

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
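
/* Illustrative note on the STATEMENT_LIST case above: lowering a
   TRY_FINALLY_EXPR or TRY_CATCH_EXPR in place can leave a nested
   STATEMENT_LIST where a single statement used to be, e.g.

     { a; TRY_FINALLY (b, c); d; }  -->  { a; { b; ...; c; }; d; }

   The loop splices such a nested list into its parent with
   tsi_link_before/tsi_delink so that later passes see one flat list,
   { a; b; ...; c; d; }.  (The braces here are only notation.)  */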

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};


/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
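
/* Illustrative example (the region number 2 is invented): for a statement
   such as

     x = foo ();    recorded in EH region 2 during lowering

   make_eh_edges looks up region 2 in the EH table and adds an
   EDGE_ABNORMAL | EDGE_EH edge from the statement's block to the block
   holding each reachable handler label.  For a "resx 2" statement the
   region number is taken from the RESX_EXPR operand instead of the
   table.  */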

static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; report an error if something is wrong.  */
static void
mark_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;
  edge e;

  stmt = (tree) data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i is missing EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *)1;
}

/* Verify that the BB containing STMT as its last statement has exactly
   the edges that make_eh_edges would create.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		error ("BB %i cannot throw but has EH edges", bb->index);
		return true;
	      }
	  return false;
	}
      if (!tree_could_throw_p (stmt))
	{
	  error ("BB %i last statement has incorrectly set region", bb->index);
	  return true;
	}
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
	{
	  error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
	  mark_eh_edge_found_error = true;
	  return true;
	}
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}

/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
	 reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
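
/* Illustrative examples of the classification above, assuming the default
   flags (trapping math enabled, no -ffast-math):

     *p         traps unless TREE_THIS_NOTRAP is set on the INDIRECT_REF
     a[i]       traps when the index may be out of bounds
     x / y      traps for an integer Y that is not a nonzero constant
     d1 < d2    traps only when NaNs must be honored
     i + j      traps only with -ftrapv on signed integer types  */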

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
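
/* Illustrative examples: with -fexceptions alone, only calls whose flags
   lack ECF_NOTHROW are considered potential throwers, e.g.

     x = foo ();    may throw, assuming foo is not known nothrow
     x = y + z;     may not throw

   while with -fnon-call-exceptions the second statement may also throw
   when the right-hand side or the stored-to location could trap, e.g. a
   division or a pointer dereference.  */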

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr, is_resx);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr;
  bool is_resx = false;

  if (TREE_CODE (stmt) == RESX_EXPR)
    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
  else
    region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return tree_could_throw_p (stmt);
  else
    return can_throw_external_1 (region_nr, is_resx);
}

/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_region (new_stmt, region_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
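
/* Illustrative use (the caller scenario is an example, not a requirement):
   when a folding pass replaces a throwing call

     x = foo (0);

   with the plain assignment "x = 42;", the old statement's EH table entry
   is removed, nothing is added for the new statement since it cannot
   throw, and the function returns true so the caller knows the now-dead
   EH edges out of the block should be purged.  */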

#ifdef ENABLE_CHECKING
static int
verify_eh_throw_stmt_node (void **slot, void *data ATTRIBUTE_UNUSED)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;

  gcc_assert (node->stmt->common.ann == NULL);
  return 1;
}

void
verify_eh_throw_table_statements (void)
{
  if (!get_eh_throw_stmt_table (cfun))
    return;
  htab_traverse (get_eh_throw_stmt_table (cfun),
		 verify_eh_throw_stmt_node,
		 NULL);
}

#endif