1/* Implements exception handling.
2   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4   Contributed by Mike Stump <mrs@cygnus.com>.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23
24/* An exception is an event that can be signaled from within a
25   function. This event can then be "caught" or "trapped" by the
26   callers of this function. This potentially allows program flow to
27   be transferred to any arbitrary code associated with a function call
28   several levels up the stack.
29
30   The intended use for this mechanism is for signaling "exceptional
31   events" in an out-of-band fashion, hence its name. The C++ language
32   (and many other OO-styled or functional languages) practically
33   requires such a mechanism, as otherwise it becomes very difficult
34   or even impossible to signal failure conditions in complex
35   situations.  The traditional C++ example is when an error occurs in
36   the process of constructing an object; without such a mechanism, it
37   is impossible to signal that the error occurs without adding global
38   state variables and error checks around every object construction.
39
40   The act of causing this event to occur is referred to as "throwing
41   an exception". (Alternate terms include "raising an exception" or
42   "signaling an exception".) The term "throw" is used because control
43   is returned to the callers of the function that is signaling the
44   exception, and thus there is the concept of "throwing" the
45   exception up the call stack.
46
47   [ Add updated documentation on how to use this.  ]  */
48
49
50#include "config.h"
51#include "system.h"
52#include "coretypes.h"
53#include "tm.h"
54#include "rtl.h"
55#include "tree.h"
56#include "flags.h"
57#include "function.h"
58#include "expr.h"
59#include "libfuncs.h"
60#include "insn-config.h"
61#include "except.h"
62#include "integrate.h"
63#include "hard-reg-set.h"
64#include "basic-block.h"
65#include "output.h"
66#include "dwarf2asm.h"
67#include "dwarf2out.h"
68#include "dwarf2.h"
69#include "toplev.h"
70#include "hashtab.h"
71#include "intl.h"
72#include "ggc.h"
73#include "tm_p.h"
74#include "target.h"
75#include "langhooks.h"
76#include "cgraph.h"
77#include "diagnostic.h"
78#include "tree-pass.h"
79#include "timevar.h"
80
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Language-specific hook pointers, presumably installed by each front
   end -- TODO(review): confirm where these are assigned; the
   assignments are not in this chunk.  */

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
97
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  /* The handler label (key).  */
  rtx label;
  /* The region owning that label; may be NULL for the sjlj return
     label (see find_exception_handler_labels).  */
  struct eh_region *region;
};

/* Base value added when assigning call-site indices -- TODO(review):
   confirm against the call-site table emission code, which is outside
   this chunk.  */
static GTY(()) int call_site_base;

/* Map from a front-end type to its runtime matching object; entries
   are TREE_LISTs built by add_type_for_runtime, keyed on
   TREE_PURPOSE.  */
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  The type node and
   the byte offsets of its fields are cached by init_eh.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
117
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.
     LABEL is the rtl label; TREE_LABEL is the tree-level label from
     which it is derived (see convert_from_eh_region_ranges).  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

/* Pointer-to-region typedef so the region array can be declared with
   the VEC macros below.  */
typedef struct eh_region *eh_region;

/* One entry of the call-site table: a landing pad and the action
   index associated with it.  */
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Declare VEC(eh_region,gc), a garbage-collected vector of regions.  */
DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);
212
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* Pseudo-registers holding the exception dispatch filter and the
     exception object pointer; created lazily by get_exception_filter
     and get_exception_pointer.  */
  rtx filter;
  rtx exc_ptr;

  /* Nonzero once landing pads have been created for this function.  */
  int built_landing_pads;
  /* The highest region number assigned so far.  */
  int last_region_number;

  /* Data being accumulated for the exception tables.  */
  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Map from handler label to region; see add_ehl_entry.  */
  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  /* The call-site table as a growable array of CALL_SITE_DATA_USED
     entries in CALL_SITE_DATA_SIZE allocated slots.  */
  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* Registers used when expanding __builtin_eh_return --
     TODO(review): confirm; the expander is outside this chunk.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* The SjLj function-context object and the point after which the
     sjlj exit code is emitted -- TODO(review): confirm against the
     sjlj emit routines, which are outside this chunk.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;

  /* Map from throw statements to regions -- TODO(review): confirm;
     struct throw_stmt_node is declared elsewhere.  */
  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
254
255
256static int t2r_eq (const void *, const void *);
257static hashval_t t2r_hash (const void *);
258static void add_type_for_runtime (tree);
259static tree lookup_type_for_runtime (tree);
260
261static void remove_unreachable_regions (rtx);
262
263static int ttypes_filter_eq (const void *, const void *);
264static hashval_t ttypes_filter_hash (const void *);
265static int ehspec_filter_eq (const void *, const void *);
266static hashval_t ehspec_filter_hash (const void *);
267static int add_ttypes_entry (htab_t, tree);
268static int add_ehspec_entry (htab_t, htab_t, tree);
269static void assign_filter_values (void);
270static void build_post_landing_pads (void);
271static void connect_post_landing_pads (void);
272static void dw2_build_landing_pads (void);
273
274struct sjlj_lp_info;
275static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
276static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
277static void sjlj_mark_call_sites (struct sjlj_lp_info *);
278static void sjlj_emit_function_enter (rtx);
279static void sjlj_emit_function_exit (void);
280static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
281static void sjlj_build_landing_pads (void);
282
283static hashval_t ehl_hash (const void *);
284static int ehl_eq (const void *, const void *);
285static void add_ehl_entry (rtx, struct eh_region *);
286static void remove_exception_handler_label (rtx);
287static void remove_eh_handler (struct eh_region *);
288static int for_each_eh_label_1 (void **, void *);
289
/* The return value of reachable_next_level: how far an exception
   propagating through a region can get.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
302
303struct reachable_info;
304static enum reachable_code reachable_next_level (struct eh_region *, tree,
305						 struct reachable_info *);
306
307static int action_record_eq (const void *, const void *);
308static hashval_t action_record_hash (const void *);
309static int add_action_record (htab_t, int, int);
310static int collect_one_action_chain (htab_t, struct eh_region *);
311static int add_call_site (rtx, int);
312
313static void push_uleb128 (varray_type *, unsigned int);
314static void push_sleb128 (varray_type *, int);
315#ifndef HAVE_AS_LEB128
316static int dw2_size_of_call_site_table (void);
317static int sjlj_size_of_call_site_table (void);
318#endif
319static void dw2_output_call_site_table (void);
320static void sjlj_output_call_site_table (void);
321
322
323/* Routine to see if exception handling is turned on.
324   DO_WARN is nonzero if we want to inform the user that exception
325   handling is turned off.
326
327   This is used to ensure that -fexceptions has been specified if the
328   compiler tries to use any exception-specific functions.  */
329
330int
331doing_eh (int do_warn)
332{
333  if (! flag_exceptions)
334    {
335      static int warned = 0;
336      if (! warned && do_warn)
337	{
338	  error ("exception handling disabled, use -fexceptions to enable");
339	  warned = 1;
340	}
341      return 0;
342    }
343  return 1;
344}
345
346
/* One-time initialization of global exception-handling state: the
   type-to-runtime map, and, for setjmp/longjmp exceptions, the layout
   of the SjLj_Function_Context record.  A no-op unless -fexceptions
   is enabled.  */
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  /* Filled lazily; see add_type_for_runtime / lookup_type_for_runtime.  */
  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* __prev: pointer to the previous context record.  */
      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* __call_site: integer call-site index.  */
      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* __data: array of 4 words.  */
      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* __personality: pointer-typed field.  */
      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* __lsda: pointer-typed field.  */
      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* __jbuf: the jump buffer.  Its element count depends on whether
	 the target uses the system setjmp or __builtin_setjmp.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
438
439void
440init_eh_for_function (void)
441{
442  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
443}
444
445/* Routines to generate the exception tree somewhat directly.
446   These are used from tree-eh.c when processing exception related
447   nodes during tree optimization.  */
448
449static struct eh_region *
450gen_eh_region (enum eh_region_type type, struct eh_region *outer)
451{
452  struct eh_region *new;
453
454#ifdef ENABLE_CHECKING
455  gcc_assert (doing_eh (0));
456#endif
457
458  /* Insert a new blank region as a leaf in the tree.  */
459  new = ggc_alloc_cleared (sizeof (*new));
460  new->type = type;
461  new->outer = outer;
462  if (outer)
463    {
464      new->next_peer = outer->inner;
465      outer->inner = new;
466    }
467  else
468    {
469      new->next_peer = cfun->eh->region_tree;
470      cfun->eh->region_tree = new;
471    }
472
473  new->region_number = ++cfun->eh->last_region_number;
474
475  return new;
476}
477
478struct eh_region *
479gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
480{
481  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
482  cleanup->u.cleanup.prev_try = prev_try;
483  return cleanup;
484}
485
486struct eh_region *
487gen_eh_region_try (struct eh_region *outer)
488{
489  return gen_eh_region (ERT_TRY, outer);
490}
491
492struct eh_region *
493gen_eh_region_catch (struct eh_region *t, tree type_or_list)
494{
495  struct eh_region *c, *l;
496  tree type_list, type_node;
497
498  /* Ensure to always end up with a type list to normalize further
499     processing, then register each type against the runtime types map.  */
500  type_list = type_or_list;
501  if (type_or_list)
502    {
503      if (TREE_CODE (type_or_list) != TREE_LIST)
504	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
505
506      type_node = type_list;
507      for (; type_node; type_node = TREE_CHAIN (type_node))
508	add_type_for_runtime (TREE_VALUE (type_node));
509    }
510
511  c = gen_eh_region (ERT_CATCH, t->outer);
512  c->u.catch.type_list = type_list;
513  l = t->u.try.last_catch;
514  c->u.catch.prev_catch = l;
515  if (l)
516    l->u.catch.next_catch = c;
517  else
518    t->u.try.catch = c;
519  t->u.try.last_catch = c;
520
521  return c;
522}
523
524struct eh_region *
525gen_eh_region_allowed (struct eh_region *outer, tree allowed)
526{
527  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
528  region->u.allowed.type_list = allowed;
529
530  for (; allowed ; allowed = TREE_CHAIN (allowed))
531    add_type_for_runtime (TREE_VALUE (allowed));
532
533  return region;
534}
535
536struct eh_region *
537gen_eh_region_must_not_throw (struct eh_region *outer)
538{
539  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
540}
541
542int
543get_eh_region_number (struct eh_region *region)
544{
545  return region->region_number;
546}
547
548bool
549get_eh_region_may_contain_throw (struct eh_region *region)
550{
551  return region->may_contain_throw;
552}
553
554tree
555get_eh_region_tree_label (struct eh_region *region)
556{
557  return region->tree_label;
558}
559
560void
561set_eh_region_tree_label (struct eh_region *region, tree lab)
562{
563  region->tree_label = lab;
564}
565
566void
567expand_resx_expr (tree exp)
568{
569  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
570  struct eh_region *reg = VEC_index (eh_region,
571				     cfun->eh->region_array, region_nr);
572
573  gcc_assert (!reg->resume);
574  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
575  emit_barrier ();
576}
577
578/* Note that the current EH region (if any) may contain a throw, or a
579   call to a function which itself may contain a throw.  */
580
581void
582note_eh_region_may_contain_throw (struct eh_region *region)
583{
584  while (region && !region->may_contain_throw)
585    {
586      region->may_contain_throw = 1;
587      region = region->outer;
588    }
589}
590
/* Convenience wrapper: mark the current function's innermost open
   region (and its ancestors) as possibly containing a throw.  */
void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
596
597
598/* Return an rtl expression for a pointer to the exception object
599   within a handler.  */
600
601rtx
602get_exception_pointer (struct function *fun)
603{
604  rtx exc_ptr = fun->eh->exc_ptr;
605  if (fun == cfun && ! exc_ptr)
606    {
607      exc_ptr = gen_reg_rtx (ptr_mode);
608      fun->eh->exc_ptr = exc_ptr;
609    }
610  return exc_ptr;
611}
612
613/* Return an rtl expression for the exception dispatch filter
614   within a handler.  */
615
616rtx
617get_exception_filter (struct function *fun)
618{
619  rtx filter = fun->eh->filter;
620  if (fun == cfun && ! filter)
621    {
622      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
623      fun->eh->filter = filter;
624    }
625  return filter;
626}
627
628/* This section is for the exception handling specific optimization pass.  */
629
630/* Random access the exception region tree.  */
631
/* Populate cfun->eh->region_array so each region can be looked up by
   its region number; done with a non-recursive depth-first walk of
   the region tree.  Slot 0 is always empty.  */
void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
667
/* Remove all regions whose labels are not reachable from insns.
   INSNS is the head of the insn chain; a region counts as reachable
   when its handler label or resume insn still appears in that chain
   (with special cases for THROW, MUST_NOT_THROW and TRY regions,
   handled below).  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  /* Build a map from insn UID to the region number that owns that
     insn as its label or resume point.  */
  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Skip empty slots and slots that no longer own their number.  */
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  /* Mark the regions whose key insns are present in the chain.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of its CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
750
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree
     labels we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  /* Discard regions whose labels never made it into the insn stream.  */
  remove_unreachable_regions (insns);
}
774
/* Record LABEL as the handler entry point for REGION in
   cfun->eh->exception_handler_label_map, marking the label so it
   cannot be deleted.  REGION may be NULL (see the sjlj return-label
   case in find_exception_handler_labels).  */
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  /* The label must survive even if it looks unreferenced.  */
  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}
797
/* Rebuild cfun->eh->exception_handler_label_map from the region
   array: one entry per live region, keyed by its handler label (or
   its landing pad, once landing pads have been built).  */
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Skip empty slots and slots that no longer own their number.  */
      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
839
840/* Returns true if the current function has exception handling regions.  */
841
842bool
843current_function_has_exception_handlers (void)
844{
845  int i;
846
847  for (i = cfun->eh->last_region_number; i > 0; --i)
848    {
849      struct eh_region *region;
850
851      region = VEC_index (eh_region, cfun->eh->region_array, i);
852      if (region
853	  && region->region_number == i
854	  && region->type != ERT_THROW)
855	return true;
856    }
857
858  return false;
859}
860
861static struct eh_region *
862duplicate_eh_region_1 (struct eh_region *o)
863{
864  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
865
866  *n = *o;
867
868  n->region_number = o->region_number + cfun->eh->last_region_number;
869  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
870  gcc_assert (!o->aka);
871
872  return n;
873}
874
/* Second pass of region duplication: O is an original region, N_ARRAY
   maps original region numbers to the copies made by
   duplicate_eh_region_1, and PREV_TRY is the TRY region enclosing the
   insertion point (used for cleanups whose original had no prev_try).
   Rewrite all region pointers in O's copy to point at the copies.  */
static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
		       struct eh_region *prev_try)
{
  struct eh_region *n = n_array[o->region_number];

  /* Remap the type-specific links.  */
  switch (n->type)
    {
    case ERT_TRY:
      if (o->u.try.catch)
        n->u.try.catch = n_array[o->u.try.catch->region_number];
      if (o->u.try.last_catch)
        n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    case ERT_CLEANUP:
      if (o->u.cleanup.prev_try)
	n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
      else
        n->u.cleanup.prev_try = prev_try;
      break;

    default:
      break;
    }

  /* Remap the tree structure links.  */
  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
915
/* Duplicate the EH regions of IFUN into current function, root the tree in
   OUTER_REGION and remap labels using MAP callback (called with DATA).
   Returns the offset that was added to IFUN's region numbers, i.e. the
   current function's previous last_region_number.  */
int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int outer_region)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur, *prev_try;
  int i;

  if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
    return 0;

  /* N_ARRAY maps IFUN region numbers to their copies.  */
  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1 + ifun_last_region_number);

  /* We might've created new cfun->eh->region_array so zero out nonexisting region 0.  */
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  /* Clear the freshly grown slots before the copies are installed.  */
  for (i = cfun->eh->last_region_number + 1;
       i < cfun->eh->last_region_number + 1 + ifun_last_region_number; i++)
    VEC_replace (eh_region, cfun->eh->region_array, i, 0);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY;
	 prev_try = prev_try->outer)
      ;

  /* First pass: shallow-copy every region and remap its tree_label.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, i);
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur);
      if (cur->tree_label)
	{
	  tree newlabel = map (cur->tree_label, data);
	  n_array[i]->tree_label = newlabel;
	}
      else
	n_array[i]->tree_label = NULL;
    }
  /* Second pass: fix up all inter-region pointers.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, i);
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array, prev_try);
    }

  /* Splice the copied tree into the current function's region tree,
     either beneath OUTER_REGION or as new top-level peers.  */
  root = n_array[ifun->eh->region_tree->region_number];
  gcc_assert (root->outer == NULL);
  if (outer_region > 0)
    {
      struct eh_region *cur
         = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      struct eh_region *p = cur->inner;

      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
        cur->inner = root;
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;

  return i;
}
1011
1012static int
1013t2r_eq (const void *pentry, const void *pdata)
1014{
1015  tree entry = (tree) pentry;
1016  tree data = (tree) pdata;
1017
1018  return TREE_PURPOSE (entry) == data;
1019}
1020
1021static hashval_t
1022t2r_hash (const void *pentry)
1023{
1024  tree entry = (tree) pentry;
1025  return TREE_HASH (TREE_PURPOSE (entry));
1026}
1027
1028static void
1029add_type_for_runtime (tree type)
1030{
1031  tree *slot;
1032
1033  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1034					    TREE_HASH (type), INSERT);
1035  if (*slot == NULL)
1036    {
1037      tree runtime = (*lang_eh_runtime_type) (type);
1038      *slot = tree_cons (type, runtime, NULL_TREE);
1039    }
1040}
1041
1042static tree
1043lookup_type_for_runtime (tree type)
1044{
1045  tree *slot;
1046
1047  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1048					    TREE_HASH (type), NO_INSERT);
1049
1050  /* We should have always inserted the data earlier.  */
1051  return TREE_VALUE (*slot);
1052}
1053
1054
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;	/* The catch type, or the exception specification list.  */
  int filter;	/* Assigned filter value: a 1-based ttype table index for
		   catch types, or a negative byte index into ehspec_data
		   for exception specifications (see add_ttypes_entry and
		   add_ehspec_entry).  */
};
1062
1063/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1064   (a tree) for a @TTypes type node we are thinking about adding.  */
1065
1066static int
1067ttypes_filter_eq (const void *pentry, const void *pdata)
1068{
1069  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1070  tree data = (tree) pdata;
1071
1072  return entry->t == data;
1073}
1074
1075static hashval_t
1076ttypes_filter_hash (const void *pentry)
1077{
1078  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1079  return TREE_HASH (entry->t);
1080}
1081
1082/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1083   exception specification list we are thinking about adding.  */
1084/* ??? Currently we use the type lists in the order given.  Someone
1085   should put these in some canonical order.  */
1086
1087static int
1088ehspec_filter_eq (const void *pentry, const void *pdata)
1089{
1090  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1091  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1092
1093  return type_list_equal (entry->t, data->t);
1094}
1095
1096/* Hash function for exception specification lists.  */
1097
1098static hashval_t
1099ehspec_filter_hash (const void *pentry)
1100{
1101  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1102  hashval_t h = 0;
1103  tree list;
1104
1105  for (list = entry->t; list ; list = TREE_CHAIN (list))
1106    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1107  return h;
1108}
1109
1110/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
1111   to speed up the search.  Return the filter value to be used.  */
1112
1113static int
1114add_ttypes_entry (htab_t ttypes_hash, tree type)
1115{
1116  struct ttypes_filter **slot, *n;
1117
1118  slot = (struct ttypes_filter **)
1119    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1120
1121  if ((n = *slot) == NULL)
1122    {
1123      /* Filter value is a 1 based table index.  */
1124
1125      n = xmalloc (sizeof (*n));
1126      n->t = type;
1127      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
1128      *slot = n;
1129
1130      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
1131    }
1132
1133  return n->filter;
1134}
1135
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.

   LIST is a TREE_LIST of the types in an exception specification.
   The returned filter is negative: it encodes (negated, 1-based) the
   byte offset in ehspec_data at which the list's encoding begins.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack-allocated dummy so we only heap-allocate on a
     genuine miss.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    /* For the ARM EABI unwinder, store the type node itself
	       rather than an encoded filter value.  */
	    VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data,
		  add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      /* Terminate the encoded list.  */
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
1179
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.

   Walks every EH region of the current function, populating
   cfun->eh->ttype_data and cfun->eh->ehspec_data as a side effect.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  /* Fresh per-function tables; ehspec_data holds trees for the ARM
     EABI unwinder and uleb128 bytes otherwise.  */
  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  /* Temporary hash tables used only to deduplicate filter values.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1258
/* Emit SEQ into a new basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  The new block is flagged BB_SUPERBLOCK; incoming
   fallthru edges are redirected so they do not fall into it.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  /* Don't let a trailing barrier become the block end.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
1286
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.

   For each TRY or ALLOWED_EXCEPTIONS region, emits a post-landing-pad
   label followed by compare-and-jump dispatch on cfun->eh->filter, and
   a RESX marker standing in for the eventual _Unwind_Resume.  CLEANUP
   and MUST_NOT_THROW regions reuse their existing label.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  /* A catch-all handler: jump unconditionally.  */
		  emit_jump (c->label);
		else
		  {
		    /* Need one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* Dispatch on the single filter value of the specification.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
1406
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  Also updates the CFG edges for the rethrow
   and removes labelless CLEANUP regions once their resx is gone.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  /* Rethrow locally: jump to the enclosing post landing pad and
	     redirect all successor edges of the resx block there.  */
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
1485
1486
/* Build the dwarf2-style landing pads: for every CLEANUP, TRY, or
   ALLOWED_EXCEPTIONS region, emit a landing-pad label, copy the
   exception pointer and filter out of the EH return data registers
   into cfun->eh->exc_ptr / cfun->eh->filter, and fall through to the
   region's post landing pad.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      /* Capture the incoming exception pointer and filter value.  */
      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
1567
1568
/* Per-region bookkeeping used while building sjlj landing pads; one
   entry per EH region number (see sjlj_build_landing_pads).  */
struct sjlj_lp_info
{
  int directly_reachable;	/* Nonzero if some insn can throw
				   directly to this region.  */
  int action_index;		/* Action chain index; -1 means no
				   action, -2 means must-not-throw.  */
  int dispatch_index;		/* Value tested in the dispatch table.  */
  int call_site_index;		/* Call-site value stored before each
				   throwing insn.  */
};
1576
/* Scan all insns for REG_EH_REGION notes and mark, in LP_INFO, each
   region that may directly receive control from a throwing insn.
   Return true if any such region was found.  */

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Non-positive region numbers mark insns that cannot throw
	 within this function.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
1624
/* Fill in the action, dispatch, and call-site indices in LP_INFO for
   every directly reachable region, building the action record table
   and the call-site table as side effects.  DISPATCH_LABEL is the
   common landing pad all regions will share.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
1687
/* Before each insn that can throw, emit a store of its call-site value
   (from LP_INFO) into the sjlj function context, so the runtime can
   tell which region was active.  Redundant stores within an extended
   basic block are elided.  */

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      /* Skip the store if the value is already current.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1749
/* Construct the SjLj_Function_Context: initialize the personality and
   LSDA fields, set up the jump buffer targeting DISPATCH_LABEL, and
   register the context with the runtime.  The whole sequence is
   inserted at (or near) function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  /* Store the LSDA address, or 0 if this function has no LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* Annotate that setjmp is expected to return 0 (direct path).  */
    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
	  fn_begin_outside_block = false;
      }

  /* Emit on the entry edge if FUNCTION_BEG precedes the first block,
     otherwise right after the note.  */
  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
1827
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  AFTER is the
   insn after which sjlj_emit_function_exit will insert the call.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}
1836
/* Emit the call that unregisters the SjLj function context, placing it
   at the spot recorded by sjlj_emit_function_exit_after (or on the
   fallthru edge into the exit block).  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert libcall
         is inside the last basic block or after it.  In the other case
         we need to emit to edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == cfun->eh->sjlj_exit_after)
	    {
	      /* Never emit between a label and its block contents.  */
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
1882
/* Emit the common sjlj landing pad at DISPATCH_LABEL: reload the
   dispatch index, exception pointer, and filter from the function
   context, then branch to the post-landing pad of whichever region
   (from LP_INFO) matches the dispatch index.  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  /* The first reachable region needs no compare: it is the fallthru
     target of the dispatch sequence emitted below.  */
  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
	                       ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
				->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
	    ->post_landing_pad);

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
1954
/* Top-level driver for sjlj exception handling: if any region is
   directly reachable, allocate the function-context stack slot and
   emit the call-site marks, context setup/teardown, and the dispatch
   table.  */

static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  /* One zero-initialized entry per region number (entry 0 unused).  */
  lp_info = xcalloc (cfun->eh->last_region_number + 1,
		     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
1982
/* Finish generation of exception handling code for the current
   function: assign filter values, build and connect the landing pads
   (dwarf2 or sjlj), and rebuild the EH edges of the CFG.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS)
    commit_edge_insertions ();
  /* Redo the EH edges of every block that had any.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
2042
2043static hashval_t
2044ehl_hash (const void *pentry)
2045{
2046  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2047
2048  /* 2^32 * ((sqrt(5) - 1) / 2) */
2049  const hashval_t scaled_golden_ratio = 0x9e3779b9;
2050  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2051}
2052
2053static int
2054ehl_eq (const void *pentry, const void *pdata)
2055{
2056  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2057  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2058
2059  return entry->label == data->label;
2060}
2061
2062/* This section handles removing dead code for flow.  */
2063
2064/* Remove LABEL from exception_handler_label_map.  */
2065
static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (cfun->eh->exception_handler_label_map == NULL)
    return;

  /* Look the label up by a stack-allocated key entry; only the label
     field participates in hashing/equality (see ehl_hash, ehl_eq).  */
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  /* With NO_INSERT a null slot would mean LABEL is absent; that would
     indicate the map and the region data are out of sync.  */
  gcc_assert (slot);

  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
2083
2084/* Splice REGION from the region tree etc.  */
2085
static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      /* Redirect every alias number of REGION to OUTER as well.  */
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	}
    }

  if (outer)
    {
      /* OUTER inherits REGION's number and all of REGION's aliases.  */
      if (!outer->aka)
        outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  /* After landing pads are built, the handler-label map is keyed by
     landing pads rather than by the handler labels themselves.  */
  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  /* Unlink REGION from its parent's (or the root's) peer list.  */
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  /* Splice REGION's children onto the front of the parent's child
     list, re-parenting each one to OUTER.  */
  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      /* Find the controlling TRY region: it sits just past the run of
	 CATCH regions in the peer list.  */
      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      gcc_assert (try->type == ERT_TRY);

      /* Unlink REGION from the TRY's doubly-linked list of catches.
	 If it was the only catch left, the TRY is dead too.  */
      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
2173
2174/* LABEL heads a basic block that is about to be deleted.  If this
2175   label corresponds to an exception region, we may be able to
2176   delete the region.  */
2177
void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  /* Look LABEL up in the handler-label map; not every deleted label
     names an exception region, so a miss is not an error here.  */
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      /* Keep the region but drop its label so it no longer pins the
	 (about to be deleted) basic block.  */
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
2212
2213/* Invokes CALLBACK for every exception handler label.  Only used by old
2214   loop hackery; should not be used by new code.  */
2215
2216void
2217for_each_eh_label (void (*callback) (rtx))
2218{
2219  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2220		 (void *) &callback);
2221}
2222
2223static int
2224for_each_eh_label_1 (void **pentry, void *data)
2225{
2226  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2227  void (*callback) (rtx) = *(void (**) (rtx)) data;
2228
2229  (*callback) (entry->label);
2230  return 1;
2231}
2232
2233/* Invoke CALLBACK for every exception region in the current function.  */
2234
2235void
2236for_each_eh_region (void (*callback) (struct eh_region *))
2237{
2238  int i, n = cfun->eh->last_region_number;
2239  for (i = 1; i <= n; ++i)
2240    {
2241      struct eh_region *region;
2242
2243      region = VEC_index (eh_region, cfun->eh->region_array, i);
2244      if (region)
2245	(*callback) (region);
2246    }
2247}
2248
2249/* This section describes CFG exception edges for flow.  */
2250
2251/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  /* TREE_LIST of catch types already seen along the search path; a
     handler whose every type is already here cannot trigger.  */
  tree types_caught;
  /* TREE_LIST of the type lists of ERT_ALLOWED_EXCEPTIONS regions
     crossed during the search.  */
  tree types_allowed;
  /* Invoked once for each reachable handler region found.  */
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
  /* True once the callback has fired at least once.  */
  bool saw_any_handlers;
};
2260
2261/* A subroutine of reachable_next_level.  Return true if TYPE, or a
2262   base class of TYPE, is in HANDLED.  */
2263
2264static int
2265check_handled (tree handled, tree type)
2266{
2267  tree t;
2268
2269  /* We can check for exact matches without front-end help.  */
2270  if (! lang_eh_type_covers)
2271    {
2272      for (t = handled; t ; t = TREE_CHAIN (t))
2273	if (TREE_VALUE (t) == type)
2274	  return 1;
2275    }
2276  else
2277    {
2278      for (t = handled; t ; t = TREE_CHAIN (t))
2279	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2280	  return 1;
2281    }
2282
2283  return 0;
2284}
2285
2286/* A subroutine of reachable_next_level.  If we are collecting a list
2287   of handlers, add one.  After landing pad generation, reference
2288   it instead of the handlers themselves.  Further, the handlers are
2289   all wired together, so by referencing one, we've got them all.
2290   Before landing pad generation we reference each handler individually.
2291
2292   LP_REGION contains the landing pad; REGION is the handler.  */
2293
2294static void
2295add_reachable_handler (struct reachable_info *info,
2296		       struct eh_region *lp_region, struct eh_region *region)
2297{
2298  if (! info)
2299    return;
2300
2301  info->saw_any_handlers = true;
2302
2303  if (cfun->eh->built_landing_pads)
2304    info->callback (lp_region, info->callback_data);
2305  else
2306    info->callback (region, info->callback_data);
2307}
2308
2309/* Process one level of exception regions for reachability.
2310   If TYPE_THROWN is non-null, then it is the *exact* type being
2311   propagated.  If INFO is non-null, then collect handler labels
2312   and caught/allowed type information between invocations.  */
2313
static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	/* Scan the attached catch handlers in order; the first
	   definitive match ends the search.  */
	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught. That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      /* Otherwise the filter might or might not pass the exception.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.

         Before inlining, do not perform this optimization.  We may
	 inline a subroutine that contains handlers, and that will
	 change the value of saw_any_handlers.  */

      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
      break;
    default:
      gcc_unreachable ();
    }
}
2477
2478/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */
2479
void
foreach_reachable_handler (int region_number, bool is_resx,
			   void (*callback) (struct eh_region *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  /* Zeroing INFO clears types_caught/types_allowed and
     saw_any_handlers for a fresh search.  */
  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      /* A THROW region records the exact type thrown; the search
	 proper starts in its containing region.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* Walk outward until something catches or blocks the exception.  */
  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }
}
2524
2525/* Retrieve a list of labels of exception handlers which can be
2526   reached by a given insn.  */
2527
2528static void
2529arh_to_landing_pad (struct eh_region *region, void *data)
2530{
2531  rtx *p_handlers = data;
2532  if (! *p_handlers)
2533    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2534}
2535
2536static void
2537arh_to_label (struct eh_region *region, void *data)
2538{
2539  rtx *p_handlers = data;
2540  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2541}
2542
2543rtx
2544reachable_handlers (rtx insn)
2545{
2546  bool is_resx = false;
2547  rtx handlers = NULL;
2548  int region_number;
2549
2550  if (JUMP_P (insn)
2551      && GET_CODE (PATTERN (insn)) == RESX)
2552    {
2553      region_number = XINT (PATTERN (insn), 0);
2554      is_resx = true;
2555    }
2556  else
2557    {
2558      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2559      if (!note || INTVAL (XEXP (note, 0)) <= 0)
2560	return NULL;
2561      region_number = INTVAL (XEXP (note, 0));
2562    }
2563
2564  foreach_reachable_handler (region_number, is_resx,
2565			     (cfun->eh->built_landing_pads
2566			      ? arh_to_landing_pad
2567			      : arh_to_label),
2568			     &handlers);
2569
2570  return handlers;
2571}
2572
2573/* Determine if the given INSN can throw an exception that is caught
2574   within the function.  */
2575
/* Worker for can_throw_internal: REGION_NUMBER names either the
   region of the insn, or, for a RESX (IS_RESX true), the region being
   left.  Return true if the exception can reach a handler in this
   function.  */

bool
can_throw_internal_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    /* A RESX resumes propagation, so start outside the named region.  */
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      /* A THROW region records the exact type thrown.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
2607
2608bool
2609can_throw_internal (rtx insn)
2610{
2611  rtx note;
2612
2613  if (! INSN_P (insn))
2614    return false;
2615
2616  if (JUMP_P (insn)
2617      && GET_CODE (PATTERN (insn)) == RESX
2618      && XINT (PATTERN (insn), 0) > 0)
2619    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2620
2621  if (NONJUMP_INSN_P (insn)
2622      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2623    insn = XVECEXP (PATTERN (insn), 0, 0);
2624
2625  /* Every insn that might throw has an EH_REGION note.  */
2626  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2627  if (!note || INTVAL (XEXP (note, 0)) <= 0)
2628    return false;
2629
2630  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2631}
2632
2633/* Determine if the given INSN can throw an exception that is
2634   visible outside the function.  */
2635
/* Worker for can_throw_external: REGION_NUMBER names either the
   region of the insn, or, for a RESX (IS_RESX true), the region being
   left.  Return true if the exception can escape this function.  */

bool
can_throw_external_1 (int region_number, bool is_resx)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);

  type_thrown = NULL_TREE;
  if (is_resx)
    /* A RESX resumes propagation, so start outside the named region.  */
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      /* A THROW region records the exact type thrown.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
2661
bool
can_throw_external (rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* A RESX with a positive region number resumes propagation out of
     that region.  */
  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true);

  /* For a delay-slot SEQUENCE, examine the insn it wraps.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  /* A non-positive region number means the insn cannot throw at all.  */
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}
2696
2697/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */
2698
2699void
2700set_nothrow_function_flags (void)
2701{
2702  rtx insn;
2703
2704  /* If we don't know that this implementation of the function will
2705     actually be used, then we must not set TREE_NOTHROW, since
2706     callers must not assume that this function does not throw.  */
2707  if (DECL_REPLACEABLE_P (current_function_decl))
2708    return;
2709
2710  TREE_NOTHROW (current_function_decl) = 1;
2711
2712  /* Assume cfun->all_throwers_are_sibcalls until we encounter
2713     something that can throw an exception.  We specifically exempt
2714     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2715     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
2716     is optimistic.  */
2717
2718  cfun->all_throwers_are_sibcalls = 1;
2719
2720  if (! flag_exceptions)
2721    return;
2722
2723  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2724    if (can_throw_external (insn))
2725      {
2726        TREE_NOTHROW (current_function_decl) = 0;
2727
2728	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2729	  {
2730	    cfun->all_throwers_are_sibcalls = 0;
2731	    return;
2732	  }
2733      }
2734
2735  for (insn = current_function_epilogue_delay_list; insn;
2736       insn = XEXP (insn, 1))
2737    if (can_throw_external (insn))
2738      {
2739        TREE_NOTHROW (current_function_decl) = 0;
2740
2741	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2742	  {
2743	    cfun->all_throwers_are_sibcalls = 0;
2744	    return;
2745	  }
2746      }
2747}
2748
/* RTL pass descriptor wrapping set_nothrow_function_flags.  The pass
   is anonymous (NULL name) and unconditional (NULL gate).  */
struct tree_opt_pass pass_set_nothrow_function_flags =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
2765
2766
2767/* Various hooks for unwind library.  */
2768
2769/* Do any necessary initialization to access arbitrary stack frames.
2770   On the SPARC, this means flushing the register windows.  */
2771
void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

  /* Let the target do any extra per-frame setup it requires (the
     header comment mentions flushing register windows on SPARC).  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2783
/* Expand __builtin_eh_return_data_regno: map the constant argument in
   ARGLIST through EH_RETURN_DATA_REGNO to the register number used in
   debug/unwind info.  Returns constm1_rtx for a non-constant argument
   or an invalid register index.  */
rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by the
     unwinder: DWARF if the target defines it, otherwise DBX.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2809
2810/* Given a value extracted from the return address register or stack slot,
2811   return the actual address encoded in that value.  */
2812
rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* Coerce ADDR into Pmode if it was expanded in some other mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2840
2841/* Given an actual address in addr_tree, do any necessary encoding
2842   and return the value to be stored in the return address register or
2843   stack slot so the epilogue will return to that address.  */
2844
rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the adjustment applied by expand_builtin_extract_return_addr
     so the stored value round-trips.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2859
2860/* Set up the epilogue with the magic bits we'll need to return to the
2861   exception handler.  */
2862
void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

  /* Copy the stack adjustment into the per-function pseudo, creating
     the pseudo on first use.  */
#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  /* Likewise for the handler address.  */
  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  /* All __builtin_eh_return sites jump to one shared label, which
     expand_eh_return later materializes before the epilogue.  */
  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
2889
void
expand_eh_return (void)
{
  rtx around_label;

  /* No __builtin_eh_return was expanded in this function; nothing
     to do.  */
  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

  /* On the normal (non-EH) path, the stack adjustment is zero.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal control flow skips over the EH-return sequence.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

  /* Install the values saved by expand_builtin_eh_return.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

  /* Prefer the target's eh_return pattern; otherwise fall back to a
     simple move into EH_RETURN_HANDLER_RTX, or report that the target
     cannot support the builtin at all.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
2929
2930/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2931   POINTERS_EXTEND_UNSIGNED and return it.  */
2932
rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Result is delivered in word_mode regardless of pointer width.  */
  return convert_modes (word_mode, ptr_mode, addr, extend);
}
2949
2950/* In the following functions, we represent entries in the action table
2951   as 1-based indices.  Special cases are:
2952
2953	 0:	null action record, non-null landing pad; implies cleanups
2954	-1:	null action record, null landing pad; implies no action
2955	-2:	no call-site entry; implies must_not_throw
2956	-3:	we have yet to process outer regions
2957
2958   Further, no special cases apply to the "next" field of the record.
2959   For next, 0 means end of list.  */
2960
struct action_record
{
  int offset;  /* 1-based position of this record in action_record_data.  */
  int filter;  /* Type filter value; 0 indicates a cleanup (see above).  */
  int next;    /* 1-based index of the next record; 0 ends the chain.  */
};
2967
2968static int
2969action_record_eq (const void *pentry, const void *pdata)
2970{
2971  const struct action_record *entry = (const struct action_record *) pentry;
2972  const struct action_record *data = (const struct action_record *) pdata;
2973  return entry->filter == data->filter && entry->next == data->next;
2974}
2975
2976static hashval_t
2977action_record_hash (const void *pentry)
2978{
2979  const struct action_record *entry = (const struct action_record *) pentry;
2980  return entry->next * 1009 + entry->filter;
2981}
2982
2983static int
2984add_action_record (htab_t ar_hash, int filter, int next)
2985{
2986  struct action_record **slot, *new, tmp;
2987
2988  tmp.filter = filter;
2989  tmp.next = next;
2990  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2991
2992  if ((new = *slot) == NULL)
2993    {
2994      new = xmalloc (sizeof (*new));
2995      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2996      new->filter = filter;
2997      new->next = next;
2998      *slot = new;
2999
3000      /* The filter value goes in untouched.  The link to the next
3001	 record is a "self-relative" byte offset, or zero to indicate
3002	 that there is no next record.  So convert the absolute 1 based
3003	 indices we've been carrying around into a displacement.  */
3004
3005      push_sleb128 (&cfun->eh->action_record_data, filter);
3006      if (next)
3007	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3008      push_sleb128 (&cfun->eh->action_record_data, next);
3009    }
3010
3011  return new->offset;
3012}
3013
/* Build the chain of action records for an insn inside REGION,
   recursing outward through the region tree.  Returns the special
   values described above: -1 (no action, no landing pad), -2
   (must_not_throw), 0 (cleanups only), or a 1-based action record
   index.  */

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      /* If an outer cleanup exists, its record already covers us.  */
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      /* A catch-all terminates the chain: no outer search.  */
	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
                 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
3124
3125static int
3126add_call_site (rtx landing_pad, int action)
3127{
3128  struct call_site_record *data = cfun->eh->call_site_data;
3129  int used = cfun->eh->call_site_data_used;
3130  int size = cfun->eh->call_site_data_size;
3131
3132  if (used >= size)
3133    {
3134      size = (size ? size * 2 : 64);
3135      data = ggc_realloc (data, sizeof (*data) * size);
3136      cfun->eh->call_site_data = data;
3137      cfun->eh->call_site_data_size = size;
3138    }
3139
3140  data[used].landing_pad = landing_pad;
3141  data[used].action = action;
3142
3143  cfun->eh->call_site_data_used = used + 1;
3144
3145  return used + call_site_base;
3146}
3147
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  /* Action state of the previous insn: -3 nothing seen yet,
     -2 must-not-throw, -1 no action needed, >= 0 action record index.  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  /* First insn of a no-action region whose begin note is being delayed.  */
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  /* Nothing to do for sjlj exceptions or when there are no EH regions.  */
  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  /* Hash table used to share action records between call sites.  */
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, the EH note lives on the first
	   contained insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Without a note, only calls (and, with -fnon-call-exceptions,
	       trapping insns) can throw.  */
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    /* A non-positive region number marks an insn that cannot throw.  */
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the final open region note, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
3274
/* Pass descriptor: turn REG_EH_REGION notes into call-site-numbered
   NOTE_INSN_EH_REGION_BEG/END note ranges.  */

struct tree_opt_pass pass_convert_to_eh_region_ranges =
{
  "eh-ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0                                     /* letter */
};
3291
3292
3293static void
3294push_uleb128 (varray_type *data_area, unsigned int value)
3295{
3296  do
3297    {
3298      unsigned char byte = value & 0x7f;
3299      value >>= 7;
3300      if (value)
3301	byte |= 0x80;
3302      VARRAY_PUSH_UCHAR (*data_area, byte);
3303    }
3304  while (value);
3305}
3306
3307static void
3308push_sleb128 (varray_type *data_area, int value)
3309{
3310  unsigned char byte;
3311  int more;
3312
3313  do
3314    {
3315      byte = value & 0x7f;
3316      value >>= 7;
3317      more = ! ((value == 0 && (byte & 0x40) == 0)
3318		|| (value == -1 && (byte & 0x40) != 0));
3319      if (more)
3320	byte |= 0x80;
3321      VARRAY_PUSH_UCHAR (*data_area, byte);
3322    }
3323  while (more);
3324}
3325
3326
3327#ifndef HAVE_AS_LEB128
3328static int
3329dw2_size_of_call_site_table (void)
3330{
3331  int n = cfun->eh->call_site_data_used;
3332  int size = n * (4 + 4 + 4);
3333  int i;
3334
3335  for (i = 0; i < n; ++i)
3336    {
3337      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3338      size += size_of_uleb128 (cs->action);
3339    }
3340
3341  return size;
3342}
3343
3344static int
3345sjlj_size_of_call_site_table (void)
3346{
3347  int n = cfun->eh->call_site_data_used;
3348  int size = 0;
3349  int i;
3350
3351  for (i = 0; i < n; ++i)
3352    {
3353      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3354      size += size_of_uleb128 (INTVAL (cs->landing_pad));
3355      size += size_of_uleb128 (cs->action);
3356    }
3357
3358  return size;
3359}
3360#endif
3361
/* Emit the DW2 call-site table: for each record, the region start and
   length (relative to the function begin label), the landing pad label
   (or 0 if none), and the uleb128 action offset.  */

static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  /* Advance the label numbering base past this function's records.  */
  call_site_base += n;
}
3415
3416static void
3417sjlj_output_call_site_table (void)
3418{
3419  int n = cfun->eh->call_site_data_used;
3420  int i;
3421
3422  for (i = 0; i < n; ++i)
3423    {
3424      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3425
3426      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3427				   "region %d landing pad", i);
3428      dw2_asm_output_data_uleb128 (cs->action, "action");
3429    }
3430
3431  call_site_base += n;
3432}
3433
3434/* Tell assembler to switch to the section for the exception handling
3435   table.  */
3436
3437void
3438default_exception_section (void)
3439{
3440  if (targetm.have_named_sections)
3441    {
3442      int flags;
3443
3444      if (EH_TABLES_CAN_BE_READ_ONLY)
3445	{
3446	  int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3447
3448	  flags = (! flag_pic
3449		   || ((tt_format & 0x70) != DW_EH_PE_absptr
3450		       && (tt_format & 0x70) != DW_EH_PE_aligned))
3451	    ? 0 : SECTION_WRITE;
3452	}
3453      else
3454	flags = SECTION_WRITE;
3455      named_section_flags (".gcc_except_table", flags);
3456    }
3457  else if (flag_pic)
3458    data_section ();
3459  else
3460    readonly_data_section ();
3461}
3462
3463
3464/* Output a reference from an exception table to the type_info object TYPE.
3465   TT_FORMAT and TT_FORMAT_SIZE descibe the DWARF encoding method used for
3466   the value.  */
3467
3468static void
3469output_ttype (tree type, int tt_format, int tt_format_size)
3470{
3471  rtx value;
3472  bool public = true;
3473
3474  if (type == NULL_TREE)
3475    value = const0_rtx;
3476  else
3477    {
3478      struct cgraph_varpool_node *node;
3479
3480      type = lookup_type_for_runtime (type);
3481      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3482
3483      /* Let cgraph know that the rtti decl is used.  Not all of the
3484	 paths below go through assemble_integer, which would take
3485	 care of this for us.  */
3486      STRIP_NOPS (type);
3487      if (TREE_CODE (type) == ADDR_EXPR)
3488	{
3489	  type = TREE_OPERAND (type, 0);
3490	  if (TREE_CODE (type) == VAR_DECL)
3491	    {
3492	      node = cgraph_varpool_node (type);
3493	      if (node)
3494		cgraph_varpool_mark_needed_node (node);
3495	      public = TREE_PUBLIC (type);
3496	    }
3497	}
3498      else if (TREE_CODE (type) != INTEGER_CST)
3499	abort ();
3500    }
3501
3502  /* Allow the target to override the type table entry format.  */
3503  if (targetm.asm_out.ttype (value))
3504    return;
3505
3506  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3507    assemble_integer (value, tt_format_size,
3508		      tt_format_size * BITS_PER_UNIT, 1);
3509  else
3510    dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
3511}
3512
/* Output the LSDA (language-specific data area) for the current
   function: header, call-site table, action record table, @TType
   table and exception specification data.  */

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  /* True if there is anything for the @TType table to describe.  */
  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
			     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  /* Without assembler leb128 support, the table size must be computed
     by hand so the @TType displacement below can be emitted.  */
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  The size of the uleb128 encoding
	 of the displacement feeds back into the padding needed before
	 the aligned @TType data, so iterate to a fixed point.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      /* The @LPStart and @TType format bytes precede the displacement.  */
      before_disp = 1 + 1;
      /* Everything between the displacement and the @TType data: the
	 call-site format byte, the call-site table length and body, and
	 the action record table.  */
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VEC_length (tree, cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  /* Let the assembler compute the call-site table length as the
     difference of two labels bracketing the table.  */
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* Emit the @TType entries in reverse index order.  */
  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
      ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    {
      /* The ARM EABI unwinder stores trees here rather than raw bytes.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  /* Return to the section of the enclosing function.  */
  current_function_section (current_function_decl);
}
3698
/* Record TABLE as FUN's statement-to-EH-region hash table.  */

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}
3704
/* Return FUN's statement-to-EH-region hash table, or NULL if unset.  */

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3710
/* Dump FUN's EH region tree to OUT, one line per region, indented two
   columns per nesting level.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  /* Printable names indexed by the region's type enumerator.  */
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
					   "allowed_exceptions", "must_not_throw",
					   "throw"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Pre-order walk of the region tree via inner/next_peer/outer links.  */
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->region_number, type_name [(int)i->type]);
      if (i->tree_label)
	{
          fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    depth--;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3755
3756/* Verify some basic invariants on EH datastructures.  Could be extended to
3757   catch more.  */
3758void
3759verify_eh_tree (struct function *fun)
3760{
3761  struct eh_region *i, *outer = NULL;
3762  bool err = false;
3763  int nvisited = 0;
3764  int count = 0;
3765  int j;
3766  int depth = 0;
3767
3768  i = fun->eh->region_tree;
3769  if (! i)
3770    return;
3771  for (j = fun->eh->last_region_number; j > 0; --j)
3772    if ((i = VEC_index (eh_region, cfun->eh->region_array, j)))
3773      {
3774	count++;
3775	if (i->region_number != j)
3776	  {
3777	    error ("region_array is corrupted for region %i", i->region_number);
3778	    err = true;
3779	  }
3780      }
3781
3782  while (1)
3783    {
3784      if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i)
3785	{
3786	  error ("region_array is corrupted for region %i", i->region_number);
3787	  err = true;
3788	}
3789      if (i->outer != outer)
3790	{
3791	  error ("outer block of region %i is wrong", i->region_number);
3792	  err = true;
3793	}
3794      if (i->may_contain_throw && outer && !outer->may_contain_throw)
3795	{
3796	  error ("region %i may contain throw and is contained in region that may not",
3797		 i->region_number);
3798	  err = true;
3799	}
3800      if (depth < 0)
3801	{
3802	  error ("negative nesting depth of region %i", i->region_number);
3803	  err = true;
3804	}
3805      nvisited ++;
3806      /* If there are sub-regions, process them.  */
3807      if (i->inner)
3808	outer = i, i = i->inner, depth++;
3809      /* If there are peers, process them.  */
3810      else if (i->next_peer)
3811	i = i->next_peer;
3812      /* Otherwise, step back up the tree to the next peer.  */
3813      else
3814	{
3815	  do {
3816	    i = i->outer;
3817	    depth--;
3818	    if (i == NULL)
3819	      {
3820		if (depth != -1)
3821		  {
3822		    error ("tree list ends on depth %i", depth + 1);
3823		    err = true;
3824		  }
3825		if (count != nvisited)
3826		  {
3827		    error ("array does not match the region tree");
3828		    err = true;
3829		  }
3830		if (err)
3831		  {
3832		    dump_eh_tree (stderr, fun);
3833		    internal_error ("verify_eh_tree failed");
3834		  }
3835	        return;
3836	      }
3837	    outer = i->outer;
3838	  } while (i->next_peer == NULL);
3839	  i = i->next_peer;
3840	}
3841    }
3842}
3843
3844/* Initialize unwind_resume_libfunc.  */
3845
3846void
3847default_init_unwind_resume_libfunc (void)
3848{
3849  /* The default c++ routines aren't actually c++ specific, so use those.  */
3850  unwind_resume_libfunc =
3851    init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
3852					     : "_Unwind_Resume");
3853}
3854
3855
/* Pass gate: run RTL EH lowering only when exception handling is
   enabled for this compilation.  */
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}
3861
/* Complete generation of exception handling code.  */
static void
rest_of_handle_eh (void)
{
  /* Run CFG cleanup on either side of landing-pad generation.  */
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
3870
/* Pass descriptor: complete RTL exception handling generation.  */

struct tree_opt_pass pass_rtl_eh =
{
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,			/* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'h'                                   /* letter */
};
3887
3888#include "gt-except.h"
3889