except.c revision 169689
/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* An exception is an event that can be signaled from within a
   function. This event can then be "caught" or "trapped" by the
   callers of this function. This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name. The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception". (Alternate terms include "raising an exception" or
   "signaling an exception".) The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
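
/* An illustrative sketch (not the authoritative definition): the type
   built in init_eh below is intended to match the runtime's
   SjLj_Function_Context from unwind-sjlj.c, roughly

       struct SjLj_Function_Context
       {
	 struct SjLj_Function_Context *prev;  // __prev: context chain
	 int call_site;			      // __call_site
	 _Unwind_Word data[4];		      // __data: landing pad arguments
	 _Unwind_Personality_Fn personality;  // __personality
	 void *lsda;			      // __lsda: exception tables
	 ...				      // __jbuf: target jump buffer
       };

   The sjlj_fc_*_ofs variables above cache the byte offsets of these
   fields for direct access from rtl.  */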

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
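
/* Illustration (hypothetical source): for a C++ fragment like

       try { ... } catch (A) { ... } catch (B) { ... }

   the tree contains an ERT_TRY region whose u.try.catch points to the
   ERT_CATCH region for A and whose u.try.last_catch points to the one
   for B; the catch for A reaches the catch for B through
   u.catch.next_catch.  The catch regions are created as peers of the
   try region under its outer region (see gen_eh_region_catch below),
   linked through next_peer.  */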

typedef struct eh_region *eh_region;

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);


/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;
  /* Make sure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
				     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* A TRY region is reachable if any of its CATCH regions
		   is reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}

/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
	{
	  o = o->next_peer;
	  duplicate_eh_regions_0 (o, min, max);
	}
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = ggc_alloc (sizeof (struct eh_region));

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  gcc_assert (!old->aka);

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
	{
	  old = old->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
	}
    }

  return ret;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the
   current function and root the tree below OUTER_REGION.  Remap labels
   using the MAP callback.  The special case of COPY_REGION of 0 means
   all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh->region_tree)
    return 0;

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun_last_region_number + 1 + num_regions);
  cfun->eh->last_region_number = max_region + eh_offset;

  /* We may have just allocated the array for the first time.
     Make sure that element zero is null.  */
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  /* Zero all entries in the range allocated.  */
  memset (VEC_address (eh_region, cfun->eh->region_array)
	  + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      splice = &outer->inner;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
	{
	  cur = cur->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
	}
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY;
	 prev_try = prev_try->outer)
      ;

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      if (cur == NULL)
	continue;

#define REMAP(REG) \
	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
			   (REG)->region_number + eh_offset)

      switch (cur->type)
	{
	case ERT_TRY:
	  if (cur->u.try.catch)
	    REMAP (cur->u.try.catch);
	  if (cur->u.try.last_catch)
	    REMAP (cur->u.try.last_catch);
	  break;

	case ERT_CATCH:
	  if (cur->u.catch.next_catch)
	    REMAP (cur->u.catch.next_catch);
	  if (cur->u.catch.prev_catch)
	    REMAP (cur->u.catch.prev_catch);
	  break;

	case ERT_CLEANUP:
	  if (cur->u.cleanup.prev_try)
	    REMAP (cur->u.cleanup.prev_try);
	  else
	    cur->u.cleanup.prev_try = prev_try;
	  break;

	default:
	  break;
	}

#undef REMAP
    }

  return eh_offset;
}
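
/* Usage sketch (illustrative; the callback and cookie names are
   hypothetical):

       int off = duplicate_eh_regions (ifun, remap_label_cb, cookie, 0, 0);

   copies every region of IFUN into cfun, and a region numbered R in
   IFUN is afterwards found at index R + off in cfun's region array,
   as the REMAP arithmetic above relies on.  */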

/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
	return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}

/* Return the number of the region that is outer to both REGION_A and
   REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
	{
	  sbitmap_free (b_outer);
	  return rp_a->region_number;
	}
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data,
		  add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
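
/* Worked example (hypothetical values): the first two distinct catch
   types added through add_ttypes_entry receive the positive filters 1
   and 2, while the first exception specification list added through
   add_ehspec_entry receives the negative filter -1, its entries
   starting at byte 0 of ehspec_data.  Positive and negative filters
   can thus coexist without colliding.  */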

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
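
/* Worked example (hypothetical types): for a function containing

       try { ... } catch (A) { ... } catch (B) { ... }

   the loop above might give the catch region for A the filter list (1)
   and the one for B the filter list (2), with ttype_data holding A and
   B; an ERT_ALLOWED_EXCEPTIONS region for an empty "throw ()"
   specification would get a negative filter from add_ehspec_entry.  */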

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}


static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}


struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
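
/* Summary of the call-site encoding chosen above (values specific to
   this file's conventions):

       action_index -2 (must-not-throw)  ->  call-site index  0
       action_index -1 (no action)       ->  call-site index -1
       otherwise                         ->  index from add_call_site

   sjlj_mark_call_sites below stores these indices into the function
   context before each potentially throwing insn.  */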

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
1851     here, as we cannot depend on emit_library_call to do it for us.  */
1852  assemble_external_libcall (eh_personality_libfunc);
1853  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1854  emit_move_insn (mem, eh_personality_libfunc);
1855
1856  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1857  if (cfun->uses_eh_lsda)
1858    {
1859      char buf[20];
1860      rtx sym;
1861
1862      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1863      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1864      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1865      emit_move_insn (mem, sym);
1866    }
1867  else
1868    emit_move_insn (mem, const0_rtx);
1869
1870#ifdef DONT_USE_BUILTIN_SETJMP
1871  {
1872    rtx x, note;
1873    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1874				 TYPE_MODE (integer_type_node), 1,
1875				 plus_constant (XEXP (fc, 0),
1876						sjlj_fc_jbuf_ofs), Pmode);
1877
1878    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
1879    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
1880
1881    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1882			     TYPE_MODE (integer_type_node), 0, dispatch_label);
1883  }
1884#else
1885  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1886			       dispatch_label);
1887#endif
1888
1889  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1890		     1, XEXP (fc, 0), Pmode);
1891
1892  seq = get_insns ();
1893  end_sequence ();
1894
1895  /* ??? Instead of doing this at the beginning of the function,
1896     do this in a block that is at loop level 0 and dominates all
1897     can_throw_internal instructions.  */
1898
1899  fn_begin_outside_block = true;
1900  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1901    if (NOTE_P (fn_begin))
1902      {
1903	if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1904	  break;
1905	else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
1906	  fn_begin_outside_block = false;
1907      }
1908
1909  if (fn_begin_outside_block)
1910    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
1911  else
1912    emit_insn_after (seq, fn_begin);
1913}
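
/* In C terms, the prologue emitted above amounts to the following (a
   sketch only; the struct field names are illustrative stand-ins for the
   sjlj_fc_*_ofs offsets, following the SjLj_Function_Context layout in
   libgcc's unwind-sjlj.c):

	fc.personality = PERSONALITY_ROUTINE;
	fc.lsda = &LLSDAxxx;		(or 0 if no lsda is needed)
	if (setjmp (fc.jbuf))		(or the builtin setjmp variant)
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);

   The matching _Unwind_SjLj_Unregister (&fc) call is emitted by
   sjlj_emit_function_exit below.  */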
1914
1915/* Call back from expand_function_end to know where we should put
1916   the call to unwind_sjlj_unregister_libfunc if needed.  */
1917
1918void
1919sjlj_emit_function_exit_after (rtx after)
1920{
1921  cfun->eh->sjlj_exit_after = after;
1922}
1923
1924static void
1925sjlj_emit_function_exit (void)
1926{
1927  rtx seq;
1928  edge e;
1929  edge_iterator ei;
1930
1931  start_sequence ();
1932
1933  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1934		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
1935
1936  seq = get_insns ();
1937  end_sequence ();
1938
1939  /* ??? Really this can be done in any block at loop level 0 that
1940     post-dominates all can_throw_internal instructions.  This is
1941     the last possible moment.  */
1942
1943  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1944    if (e->flags & EDGE_FALLTHRU)
1945      break;
1946  if (e)
1947    {
1948      rtx insn;
1949
1950      /* Figure out whether the place we are supposed to insert the
1951         libcall is inside the last basic block or after it.  In the
1952         latter case we must emit the insns on the edge instead.  */
1953      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
1954      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
1955	{
1956	  if (insn == cfun->eh->sjlj_exit_after)
1957	    {
1958	      if (LABEL_P (insn))
1959		insn = NEXT_INSN (insn);
1960	      emit_insn_after (seq, insn);
1961	      return;
1962	    }
1963	  if (insn == BB_END (e->src))
1964	    break;
1965	}
1966      insert_insn_on_edge (seq, e);
1967    }
1968}
1969
1970static void
1971sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1972{
1973  int i, first_reachable;
1974  rtx mem, dispatch, seq, fc;
1975  rtx before;
1976  basic_block bb;
1977  edge e;
1978
1979  fc = cfun->eh->sjlj_fc;
1980
1981  start_sequence ();
1982
1983  emit_label (dispatch_label);
1984
1985#ifndef DONT_USE_BUILTIN_SETJMP
1986  expand_builtin_setjmp_receiver (dispatch_label);
1987#endif
1988
1989  /* Load up dispatch index, exc_ptr and filter values from the
1990     function context.  */
1991  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1992			sjlj_fc_call_site_ofs);
1993  dispatch = copy_to_reg (mem);
1994
1995  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
1996  if (word_mode != ptr_mode)
1997    {
1998#ifdef POINTERS_EXTEND_UNSIGNED
1999      mem = convert_memory_address (ptr_mode, mem);
2000#else
2001      mem = convert_to_mode (ptr_mode, mem, 0);
2002#endif
2003    }
2004  emit_move_insn (cfun->eh->exc_ptr, mem);
2005
2006  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2007  emit_move_insn (cfun->eh->filter, mem);
2008
2009  /* Jump to one of the directly reachable regions.  */
2010  /* ??? This really ought to be using a switch statement.  */
2011
2012  first_reachable = 0;
2013  for (i = cfun->eh->last_region_number; i > 0; --i)
2014    {
2015      if (! lp_info[i].directly_reachable)
2016	continue;
2017
2018      if (! first_reachable)
2019	{
2020	  first_reachable = i;
2021	  continue;
2022	}
2023
2024      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2025			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2026	                       ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2027				->post_landing_pad);
2028    }
2029
2030  seq = get_insns ();
2031  end_sequence ();
2032
2033  before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2034	    ->post_landing_pad);
2035
2036  bb = emit_to_new_bb_before (seq, before);
2037  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2038  e->count = bb->count;
2039  e->probability = REG_BR_PROB_BASE;
2040}
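
/* The dispatch sequence built above reduces to a compare-and-branch
   chain; schematically (not the literal RTL):

	if (dispatch == DISPATCH_INDEX_2) goto post_landing_pad_2;
	if (dispatch == DISPATCH_INDEX_1) goto post_landing_pad_1;
	(fall through to first_reachable's post-landing pad)

   No explicit jump is needed for first_reachable's region because the
   sequence is placed immediately before its post-landing pad (and see
   the ??? note above about wanting a real switch here).  */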
2041
2042static void
2043sjlj_build_landing_pads (void)
2044{
2045  struct sjlj_lp_info *lp_info;
2046
2047  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2048
2049  if (sjlj_find_directly_reachable_regions (lp_info))
2050    {
2051      rtx dispatch_label = gen_label_rtx ();
2052
2053      cfun->eh->sjlj_fc
2054	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2055			      int_size_in_bytes (sjlj_fc_type_node),
2056			      TYPE_ALIGN (sjlj_fc_type_node));
2057
2058      sjlj_assign_call_site_values (dispatch_label, lp_info);
2059      sjlj_mark_call_sites (lp_info);
2060
2061      sjlj_emit_function_enter (dispatch_label);
2062      sjlj_emit_dispatch_table (dispatch_label, lp_info);
2063      sjlj_emit_function_exit ();
2064    }
2065
2066  free (lp_info);
2067}
2068
2069void
2070finish_eh_generation (void)
2071{
2072  basic_block bb;
2073
2074  /* Nothing to do if no regions created.  */
2075  if (cfun->eh->region_tree == NULL)
2076    return;
2077
2078  /* The object here is to provide find_basic_blocks with detailed
2079     information (via reachable_handlers) on how exception control
2080     flows within the function.  In this first pass, we can include
2081     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2082     regions, and hope that it will be useful in deleting unreachable
2083     handlers.  Subsequently, we will generate landing pads which will
2084     connect many of the handlers, and then type information will not
2085     be effective.  Still, this is a win over previous implementations.  */
2086
2087  /* These registers are used by the landing pads.  Make sure they
2088     have been generated.  */
2089  get_exception_pointer (cfun);
2090  get_exception_filter (cfun);
2091
2092  /* Construct the landing pads.  */
2093
2094  assign_filter_values ();
2095  build_post_landing_pads ();
2096  connect_post_landing_pads ();
2097  if (USING_SJLJ_EXCEPTIONS)
2098    sjlj_build_landing_pads ();
2099  else
2100    dw2_build_landing_pads ();
2101
2102  cfun->eh->built_landing_pads = 1;
2103
2104  /* We've totally changed the CFG.  Start over.  */
2105  find_exception_handler_labels ();
2106  break_superblocks ();
2107  if (USING_SJLJ_EXCEPTIONS)
2108    commit_edge_insertions ();
2109  FOR_EACH_BB (bb)
2110    {
2111      edge e;
2112      edge_iterator ei;
2113      bool eh = false;
2114      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2115	{
2116	  if (e->flags & EDGE_EH)
2117	    {
2118	      remove_edge (e);
2119	      eh = true;
2120	    }
2121	  else
2122	    ei_next (&ei);
2123	}
2124      if (eh)
2125	rtl_make_eh_edge (NULL, bb, BB_END (bb));
2126    }
2127}
2128
2129static hashval_t
2130ehl_hash (const void *pentry)
2131{
2132  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2133
2134  /* 2^32 * ((sqrt(5) - 1) / 2) */
2135  const hashval_t scaled_golden_ratio = 0x9e3779b9;
2136  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2137}
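
/* This is Knuth's multiplicative hash: 0x9e3779b9 is floor (2^32 / phi),
   so multiplying by it scatters consecutive label numbers across the
   full hash range instead of leaving them clustered in the low bits.  */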
2138
2139static int
2140ehl_eq (const void *pentry, const void *pdata)
2141{
2142  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2143  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2144
2145  return entry->label == data->label;
2146}
2147
2148/* This section handles removing dead exception-handling code for the flow pass.  */
2149
2150/* Remove LABEL from exception_handler_label_map.  */
2151
2152static void
2153remove_exception_handler_label (rtx label)
2154{
2155  struct ehl_map_entry **slot, tmp;
2156
2157  /* If exception_handler_label_map was not built yet,
2158     there is nothing to do.  */
2159  if (cfun->eh->exception_handler_label_map == NULL)
2160    return;
2161
2162  tmp.label = label;
2163  slot = (struct ehl_map_entry **)
2164    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2165  gcc_assert (slot);
2166
2167  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2168}
2169
2170/* Splice REGION from the region tree etc.  */
2171
2172static void
2173remove_eh_handler (struct eh_region *region)
2174{
2175  struct eh_region **pp, **pp_start, *p, *outer, *inner;
2176  rtx lab;
2177
2178  /* For the benefit of efficiently handling REG_EH_REGION notes,
2179     replace this region in the region array with its containing
2180     region.  Note that previous region deletions may result in
2181     multiple copies of this region in the array, so we have a
2182     list of alternate numbers by which we are known.  */
2183
2184  outer = region->outer;
2185  VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
2186  if (region->aka)
2187    {
2188      unsigned i;
2189      bitmap_iterator bi;
2190
2191      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2192	{
2193          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
2194	}
2195    }
2196
2197  if (outer)
2198    {
2199      if (!outer->aka)
2200        outer->aka = BITMAP_GGC_ALLOC ();
2201      if (region->aka)
2202	bitmap_ior_into (outer->aka, region->aka);
2203      bitmap_set_bit (outer->aka, region->region_number);
2204    }
2205
2206  if (cfun->eh->built_landing_pads)
2207    lab = region->landing_pad;
2208  else
2209    lab = region->label;
2210  if (lab)
2211    remove_exception_handler_label (lab);
2212
2213  if (outer)
2214    pp_start = &outer->inner;
2215  else
2216    pp_start = &cfun->eh->region_tree;
2217  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2218    continue;
2219  *pp = region->next_peer;
2220
2221  inner = region->inner;
2222  if (inner)
2223    {
2224      for (p = inner; p->next_peer ; p = p->next_peer)
2225	p->outer = outer;
2226      p->outer = outer;
2227
2228      p->next_peer = *pp_start;
2229      *pp_start = inner;
2230    }
2231
2232  if (region->type == ERT_CATCH)
2233    {
2234      struct eh_region *try, *next, *prev;
2235
2236      for (try = region->next_peer;
2237	   try->type == ERT_CATCH;
2238	   try = try->next_peer)
2239	continue;
2240      gcc_assert (try->type == ERT_TRY);
2241
2242      next = region->u.catch.next_catch;
2243      prev = region->u.catch.prev_catch;
2244
2245      if (next)
2246	next->u.catch.prev_catch = prev;
2247      else
2248	try->u.try.last_catch = prev;
2249      if (prev)
2250	prev->u.catch.next_catch = next;
2251      else
2252	{
2253	  try->u.try.catch = next;
2254	  if (! next)
2255	    remove_eh_handler (try);
2256	}
2257    }
2258}
2259
2260/* LABEL heads a basic block that is about to be deleted.  If this
2261   label corresponds to an exception region, we may be able to
2262   delete the region.  */
2263
2264void
2265maybe_remove_eh_handler (rtx label)
2266{
2267  struct ehl_map_entry **slot, tmp;
2268  struct eh_region *region;
2269
2270  /* ??? After generating landing pads, it's not so simple to determine
2271     if the region data is completely unused.  One must examine the
2272     landing pad and the post landing pad, and whether an inner try block
2273     is referencing the catch handlers directly.  */
2274  if (cfun->eh->built_landing_pads)
2275    return;
2276
2277  tmp.label = label;
2278  slot = (struct ehl_map_entry **)
2279    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2280  if (! slot)
2281    return;
2282  region = (*slot)->region;
2283  if (! region)
2284    return;
2285
2286  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2287     because there is no path to the fallback call to terminate.
2288     But the region continues to affect call-site data until there
2289     are no more contained calls, which we don't see here.  */
2290  if (region->type == ERT_MUST_NOT_THROW)
2291    {
2292      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2293      region->label = NULL_RTX;
2294    }
2295  else
2296    remove_eh_handler (region);
2297}
2298
2299/* Invokes CALLBACK for every exception handler label.  Only used by old
2300   loop hackery; should not be used by new code.  */
2301
2302void
2303for_each_eh_label (void (*callback) (rtx))
2304{
2305  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2306		 (void *) &callback);
2307}
2308
2309static int
2310for_each_eh_label_1 (void **pentry, void *data)
2311{
2312  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2313  void (*callback) (rtx) = *(void (**) (rtx)) data;
2314
2315  (*callback) (entry->label);
2316  return 1;
2317}
2318
2319/* Invoke CALLBACK for every exception region in the current function.  */
2320
2321void
2322for_each_eh_region (void (*callback) (struct eh_region *))
2323{
2324  int i, n = cfun->eh->last_region_number;
2325  for (i = 1; i <= n; ++i)
2326    {
2327      struct eh_region *region;
2328
2329      region = VEC_index (eh_region, cfun->eh->region_array, i);
2330      if (region)
2331	(*callback) (region);
2332    }
2333}
2334
2335/* This section describes CFG exception edges for flow.  */
2336
2337/* For communicating between calls to reachable_next_level.  */
2338struct reachable_info
2339{
2340  tree types_caught;
2341  tree types_allowed;
2342  void (*callback) (struct eh_region *, void *);
2343  void *callback_data;
2344  bool saw_any_handlers;
2345};
2346
2347/* A subroutine of reachable_next_level.  Return true if TYPE, or a
2348   base class of TYPE, is in HANDLED.  */
2349
2350static int
2351check_handled (tree handled, tree type)
2352{
2353  tree t;
2354
2355  /* We can check for exact matches without front-end help.  */
2356  if (! lang_eh_type_covers)
2357    {
2358      for (t = handled; t ; t = TREE_CHAIN (t))
2359	if (TREE_VALUE (t) == type)
2360	  return 1;
2361    }
2362  else
2363    {
2364      for (t = handled; t ; t = TREE_CHAIN (t))
2365	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2366	  return 1;
2367    }
2368
2369  return 0;
2370}
2371
2372/* A subroutine of reachable_next_level.  If we are collecting a list
2373   of handlers, add one.  After landing pad generation, we reference the
2374   landing pad region instead of the handlers themselves.  Further, the handlers are
2375   all wired together, so by referencing one, we've got them all.
2376   Before landing pad generation we reference each handler individually.
2377
2378   LP_REGION contains the landing pad; REGION is the handler.  */
2379
2380static void
2381add_reachable_handler (struct reachable_info *info,
2382		       struct eh_region *lp_region, struct eh_region *region)
2383{
2384  if (! info)
2385    return;
2386
2387  info->saw_any_handlers = true;
2388
2389  if (cfun->eh->built_landing_pads)
2390    info->callback (lp_region, info->callback_data);
2391  else
2392    info->callback (region, info->callback_data);
2393}
2394
2395/* Process one level of exception regions for reachability.
2396   If TYPE_THROWN is non-null, then it is the *exact* type being
2397   propagated.  If INFO is non-null, then collect handler labels
2398   and caught/allowed type information between invocations.  */
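/* The reachable_code values returned below behave as a small lattice
   (see the enum declaration in except.h): RNL_NOT_CAUGHT means keep
   searching outer regions; RNL_MAYBE_CAUGHT means this region may take
   the exception but outer regions must still be considered; RNL_CAUGHT
   and RNL_BLOCKED are both terminal, RNL_BLOCKED meaning the exception
   cannot propagate at all (e.g. must-not-throw).  This is why callers
   test ">= RNL_CAUGHT" to stop walking outward.  */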
2399
2400static enum reachable_code
2401reachable_next_level (struct eh_region *region, tree type_thrown,
2402		      struct reachable_info *info)
2403{
2404  switch (region->type)
2405    {
2406    case ERT_CLEANUP:
2407      /* Before landing-pad generation, we model control flow
2408	 directly to the individual handlers.  In this way we can
2409	 see that catch handler types may shadow one another.  */
2410      add_reachable_handler (info, region, region);
2411      return RNL_MAYBE_CAUGHT;
2412
2413    case ERT_TRY:
2414      {
2415	struct eh_region *c;
2416	enum reachable_code ret = RNL_NOT_CAUGHT;
2417
2418	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2419	  {
2420	    /* A catch-all handler ends the search.  */
2421	    if (c->u.catch.type_list == NULL)
2422	      {
2423		add_reachable_handler (info, region, c);
2424		return RNL_CAUGHT;
2425	      }
2426
2427	    if (type_thrown)
2428	      {
2429		/* If we have at least one type match, end the search.  */
2430		tree tp_node = c->u.catch.type_list;
2431
2432		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2433		  {
2434		    tree type = TREE_VALUE (tp_node);
2435
2436		    if (type == type_thrown
2437			|| (lang_eh_type_covers
2438			    && (*lang_eh_type_covers) (type, type_thrown)))
2439		      {
2440			add_reachable_handler (info, region, c);
2441			return RNL_CAUGHT;
2442		      }
2443		  }
2444
2445		/* If we have definitive information of a match failure,
2446		   the catch won't trigger.  */
2447		if (lang_eh_type_covers)
2448		  return RNL_NOT_CAUGHT;
2449	      }
2450
2451	    /* At this point, we either don't know what type is thrown or
2452	       don't have front-end assistance to help deciding if it is
2453	       covered by one of the types in the list for this region.
2454
2455	       We'd then like to add this region to the list of reachable
2456	       handlers since it is indeed potentially reachable based on the
2457	       information we have.
2458
2459	       Actually, this handler is for sure not reachable if all the
2460	       types it matches have already been caught. That is, it is only
2461	       potentially reachable if at least one of the types it catches
2462	       has not been previously caught.  */
2463
2464	    if (! info)
2465	      ret = RNL_MAYBE_CAUGHT;
2466	    else
2467	      {
2468		tree tp_node = c->u.catch.type_list;
2469		bool maybe_reachable = false;
2470
2471		/* Compute the potential reachability of this handler and
2472		   update the list of types caught at the same time.  */
2473		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2474		  {
2475		    tree type = TREE_VALUE (tp_node);
2476
2477		    if (! check_handled (info->types_caught, type))
2478		      {
2479			info->types_caught
2480			  = tree_cons (NULL, type, info->types_caught);
2481
2482			maybe_reachable = true;
2483		      }
2484		  }
2485
2486		if (maybe_reachable)
2487		  {
2488		    add_reachable_handler (info, region, c);
2489
2490		    /* ??? If the catch type is a base class of every allowed
2491		       type, then we know we can stop the search.  */
2492		    ret = RNL_MAYBE_CAUGHT;
2493		  }
2494	      }
2495	  }
2496
2497	return ret;
2498      }
2499
2500    case ERT_ALLOWED_EXCEPTIONS:
2501      /* An empty list of types definitely ends the search.  */
2502      if (region->u.allowed.type_list == NULL_TREE)
2503	{
2504	  add_reachable_handler (info, region, region);
2505	  return RNL_CAUGHT;
2506	}
2507
2508      /* Collect a list of lists of allowed types for use in detecting
2509	 when a catch may be transformed into a catch-all.  */
2510      if (info)
2511	info->types_allowed = tree_cons (NULL_TREE,
2512					 region->u.allowed.type_list,
2513					 info->types_allowed);
2514
2515      /* If we have definitive information about the type hierarchy,
2516	 then we can tell if the thrown type will pass through the
2517	 filter.  */
2518      if (type_thrown && lang_eh_type_covers)
2519	{
2520	  if (check_handled (region->u.allowed.type_list, type_thrown))
2521	    return RNL_NOT_CAUGHT;
2522	  else
2523	    {
2524	      add_reachable_handler (info, region, region);
2525	      return RNL_CAUGHT;
2526	    }
2527	}
2528
2529      add_reachable_handler (info, region, region);
2530      return RNL_MAYBE_CAUGHT;
2531
2532    case ERT_CATCH:
2533      /* Catch regions are handled by their controlling try region.  */
2534      return RNL_NOT_CAUGHT;
2535
2536    case ERT_MUST_NOT_THROW:
2537      /* Here we end our search, since no exceptions may propagate.
2538	 If we've already touched down at some landing pad, then the
2539	 explicit function call we generated may be used.  Otherwise
2540	 the call is made by the runtime.
2541
2542         Before inlining, do not perform this optimization.  We may
2543	 inline a subroutine that contains handlers, and that will
2544	 change the value of saw_any_handlers.  */
2545
2546      if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2547	{
2548	  add_reachable_handler (info, region, region);
2549	  return RNL_CAUGHT;
2550	}
2551      else
2552	return RNL_BLOCKED;
2553
2554    case ERT_THROW:
2555    case ERT_UNKNOWN:
2556      /* Shouldn't see these here.  */
2557      gcc_unreachable ();
2558      break;
2559    default:
2560      gcc_unreachable ();
2561    }
2562}
2563
2564/* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */
2565
2566void
2567foreach_reachable_handler (int region_number, bool is_resx,
2568			   void (*callback) (struct eh_region *, void *),
2569			   void *callback_data)
2570{
2571  struct reachable_info info;
2572  struct eh_region *region;
2573  tree type_thrown;
2574
2575  memset (&info, 0, sizeof (info));
2576  info.callback = callback;
2577  info.callback_data = callback_data;
2578
2579  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2580
2581  type_thrown = NULL_TREE;
2582  if (is_resx)
2583    {
2584      /* A RESX leaves a region instead of entering it.  Thus the
2585	 region itself may have been deleted out from under us.  */
2586      if (region == NULL)
2587	return;
2588      region = region->outer;
2589    }
2590  else if (region->type == ERT_THROW)
2591    {
2592      type_thrown = region->u.throw.type;
2593      region = region->outer;
2594    }
2595
2596  while (region)
2597    {
2598      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2599	break;
2600      /* If we have processed one cleanup, there is no point in
2601	 processing any more of them.  Each cleanup will have an edge
2602	 to the next outer cleanup region, so the flow graph will be
2603	 accurate.  */
2604      if (region->type == ERT_CLEANUP)
2605	region = region->u.cleanup.prev_try;
2606      else
2607	region = region->outer;
2608    }
2609}
2610
2611/* Retrieve a list of labels of exception handlers which can be
2612   reached by a given insn.  */
2613
2614static void
2615arh_to_landing_pad (struct eh_region *region, void *data)
2616{
2617  rtx *p_handlers = data;
2618  if (! *p_handlers)
2619    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2620}
2621
2622static void
2623arh_to_label (struct eh_region *region, void *data)
2624{
2625  rtx *p_handlers = data;
2626  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2627}
2628
2629rtx
2630reachable_handlers (rtx insn)
2631{
2632  bool is_resx = false;
2633  rtx handlers = NULL;
2634  int region_number;
2635
2636  if (JUMP_P (insn)
2637      && GET_CODE (PATTERN (insn)) == RESX)
2638    {
2639      region_number = XINT (PATTERN (insn), 0);
2640      is_resx = true;
2641    }
2642  else
2643    {
2644      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2645      if (!note || INTVAL (XEXP (note, 0)) <= 0)
2646	return NULL;
2647      region_number = INTVAL (XEXP (note, 0));
2648    }
2649
2650  foreach_reachable_handler (region_number, is_resx,
2651			     (cfun->eh->built_landing_pads
2652			      ? arh_to_landing_pad
2653			      : arh_to_label),
2654			     &handlers);
2655
2656  return handlers;
2657}
2658
2659/* Determine if the given INSN can throw an exception that is caught
2660   within the function.  */
2661
2662bool
2663can_throw_internal_1 (int region_number, bool is_resx)
2664{
2665  struct eh_region *region;
2666  tree type_thrown;
2667
2668  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2669
2670  type_thrown = NULL_TREE;
2671  if (is_resx)
2672    region = region->outer;
2673  else if (region->type == ERT_THROW)
2674    {
2675      type_thrown = region->u.throw.type;
2676      region = region->outer;
2677    }
2678
2679  /* If this exception is ignored by each and every containing region,
2680     then control passes straight out.  The runtime may handle some
2681     regions, which also do not require processing internally.  */
2682  for (; region; region = region->outer)
2683    {
2684      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2685      if (how == RNL_BLOCKED)
2686	return false;
2687      if (how != RNL_NOT_CAUGHT)
2688	return true;
2689    }
2690
2691  return false;
2692}
2693
2694bool
2695can_throw_internal (rtx insn)
2696{
2697  rtx note;
2698
2699  if (! INSN_P (insn))
2700    return false;
2701
2702  if (JUMP_P (insn)
2703      && GET_CODE (PATTERN (insn)) == RESX
2704      && XINT (PATTERN (insn), 0) > 0)
2705    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2706
2707  if (NONJUMP_INSN_P (insn)
2708      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2709    insn = XVECEXP (PATTERN (insn), 0, 0);
2710
2711  /* Every insn that might throw has an EH_REGION note.  */
2712  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2713  if (!note || INTVAL (XEXP (note, 0)) <= 0)
2714    return false;
2715
2716  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2717}
2718
2719/* Determine if the given INSN can throw an exception that is
2720   visible outside the function.  */
2721
2722bool
2723can_throw_external_1 (int region_number, bool is_resx)
2724{
2725  struct eh_region *region;
2726  tree type_thrown;
2727
2728  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2729
2730  type_thrown = NULL_TREE;
2731  if (is_resx)
2732    region = region->outer;
2733  else if (region->type == ERT_THROW)
2734    {
2735      type_thrown = region->u.throw.type;
2736      region = region->outer;
2737    }
2738
2739  /* If the exception is caught or blocked by any containing region,
2740     then it is not seen by any calling function.  */
2741  for (; region ; region = region->outer)
2742    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2743      return false;
2744
2745  return true;
2746}
2747
2748bool
2749can_throw_external (rtx insn)
2750{
2751  rtx note;
2752
2753  if (! INSN_P (insn))
2754    return false;
2755
2756  if (JUMP_P (insn)
2757      && GET_CODE (PATTERN (insn)) == RESX
2758      && XINT (PATTERN (insn), 0) > 0)
2759    return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2760
2761  if (NONJUMP_INSN_P (insn)
2762      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2763    insn = XVECEXP (PATTERN (insn), 0, 0);
2764
2765  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2766  if (!note)
2767    {
2768      /* Calls (and trapping insns) without notes are outside any
2769	 exception handling region in this function.  We have to
2770	 assume it might throw.  Given that the front end and middle
2771	 ends mark known NOTHROW functions, this isn't so wildly
2772	 inaccurate.  */
2773      return (CALL_P (insn)
2774	      || (flag_non_call_exceptions
2775		  && may_trap_p (PATTERN (insn))));
2776    }
2777  if (INTVAL (XEXP (note, 0)) <= 0)
2778    return false;
2779
2780  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2781}
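
/* Note that can_throw_internal and can_throw_external are not mutually
   exclusive.  If the containing regions only MAYBE catch the exception
   (RNL_MAYBE_CAUGHT at every level), both predicates return true: the
   exception may land within the function or may propagate out of it.  */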
2782
2783/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls.  */
2784
2785unsigned int
2786set_nothrow_function_flags (void)
2787{
2788  rtx insn;
2789
2790  /* If we don't know that this implementation of the function will
2791     actually be used, then we must not set TREE_NOTHROW, since
2792     callers must not assume that this function does not throw.  */
2793  if (DECL_REPLACEABLE_P (current_function_decl))
2794    return 0;
2795
2796  TREE_NOTHROW (current_function_decl) = 1;
2797
2798  /* Assume cfun->all_throwers_are_sibcalls until we encounter
2799     something that can throw an exception.  We specifically exempt
2800     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2801     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
2802     is optimistic.  */
2803
2804  cfun->all_throwers_are_sibcalls = 1;
2805
2806  if (! flag_exceptions)
2807    return 0;
2808
2809  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2810    if (can_throw_external (insn))
2811      {
2812        TREE_NOTHROW (current_function_decl) = 0;
2813
2814	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2815	  {
2816	    cfun->all_throwers_are_sibcalls = 0;
2817	    return 0;
2818	  }
2819      }
2820
2821  for (insn = current_function_epilogue_delay_list; insn;
2822       insn = XEXP (insn, 1))
2823    if (can_throw_external (insn))
2824      {
2825        TREE_NOTHROW (current_function_decl) = 0;
2826
2827	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2828	  {
2829	    cfun->all_throwers_are_sibcalls = 0;
2830	    return 0;
2831	  }
2832      }
2833  return 0;
2834}
2835
2836struct tree_opt_pass pass_set_nothrow_function_flags =
2837{
2838  NULL,                                 /* name */
2839  NULL,                                 /* gate */
2840  set_nothrow_function_flags,           /* execute */
2841  NULL,                                 /* sub */
2842  NULL,                                 /* next */
2843  0,                                    /* static_pass_number */
2844  0,                                    /* tv_id */
2845  0,                                    /* properties_required */
2846  0,                                    /* properties_provided */
2847  0,                                    /* properties_destroyed */
2848  0,                                    /* todo_flags_start */
2849  0,                                    /* todo_flags_finish */
2850  0                                     /* letter */
2851};
2852
2853
2854/* Various hooks for unwind library.  */
2855
2856/* Do any necessary initialization to access arbitrary stack frames.
2857   On the SPARC, this means flushing the register windows.  */
2858
2859void
2860expand_builtin_unwind_init (void)
2861{
2862  /* Set this so all the registers get saved in our frame; we need to be
2863     able to copy the saved values for any registers from frames we unwind.  */
2864  current_function_has_nonlocal_label = 1;
2865
2866#ifdef SETUP_FRAME_ADDRESSES
2867  SETUP_FRAME_ADDRESSES ();
2868#endif
2869}
2870
2871rtx
2872expand_builtin_eh_return_data_regno (tree arglist)
2873{
2874  tree which = TREE_VALUE (arglist);
2875  unsigned HOST_WIDE_INT iwhich;
2876
2877  if (TREE_CODE (which) != INTEGER_CST)
2878    {
2879      error ("argument of %<__builtin_eh_return_regno%> must be constant");
2880      return constm1_rtx;
2881    }
2882
2883  iwhich = tree_low_cst (which, 1);
2884  iwhich = EH_RETURN_DATA_REGNO (iwhich);
2885  if (iwhich == INVALID_REGNUM)
2886    return constm1_rtx;
2887
2888#ifdef DWARF_FRAME_REGNUM
2889  iwhich = DWARF_FRAME_REGNUM (iwhich);
2890#else
2891  iwhich = DBX_REGISTER_NUMBER (iwhich);
2892#endif
2893
2894  return GEN_INT (iwhich);
2895}
2896
2897/* Given a value extracted from the return address register or stack slot,
2898   return the actual address encoded in that value.  */
2899
2900rtx
2901expand_builtin_extract_return_addr (tree addr_tree)
2902{
2903  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2904
2905  if (GET_MODE (addr) != Pmode
2906      && GET_MODE (addr) != VOIDmode)
2907    {
2908#ifdef POINTERS_EXTEND_UNSIGNED
2909      addr = convert_memory_address (Pmode, addr);
2910#else
2911      addr = convert_to_mode (Pmode, addr, 0);
2912#endif
2913    }
2914
2915  /* First mask out any unwanted bits.  */
2916#ifdef MASK_RETURN_ADDR
2917  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2918#endif
2919
2920  /* Then adjust to find the real return address.  */
2921#if defined (RETURN_ADDR_OFFSET)
2922  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2923#endif
2924
2925  return addr;
2926}
2927
2928/* Given an actual address in addr_tree, do any necessary encoding
2929   and return the value to be stored in the return address register or
2930   stack slot so the epilogue will return to that address.  */
2931
2932rtx
2933expand_builtin_frob_return_addr (tree addr_tree)
2934{
2935  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2936
2937  addr = convert_memory_address (Pmode, addr);
2938
2939#ifdef RETURN_ADDR_OFFSET
2940  addr = force_reg (Pmode, addr);
2941  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2942#endif
2943
2944  return addr;
2945}
2946
2947/* Set up the epilogue with the magic bits we'll need to return to the
2948   exception handler.  */
2949
2950void
2951expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2952			  tree handler_tree)
2953{
2954  rtx tmp;
2955
2956#ifdef EH_RETURN_STACKADJ_RTX
2957  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2958  tmp = convert_memory_address (Pmode, tmp);
2959  if (!cfun->eh->ehr_stackadj)
2960    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2961  else if (tmp != cfun->eh->ehr_stackadj)
2962    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2963#endif
2964
2965  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2966  tmp = convert_memory_address (Pmode, tmp);
2967  if (!cfun->eh->ehr_handler)
2968    cfun->eh->ehr_handler = copy_to_reg (tmp);
2969  else if (tmp != cfun->eh->ehr_handler)
2970    emit_move_insn (cfun->eh->ehr_handler, tmp);
2971
2972  if (!cfun->eh->ehr_label)
2973    cfun->eh->ehr_label = gen_label_rtx ();
2974  emit_jump (cfun->eh->ehr_label);
2975}
2976
2977void
2978expand_eh_return (void)
2979{
2980  rtx around_label;
2981
2982  if (! cfun->eh->ehr_label)
2983    return;
2984
2985  current_function_calls_eh_return = 1;
2986
2987#ifdef EH_RETURN_STACKADJ_RTX
2988  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2989#endif
2990
2991  around_label = gen_label_rtx ();
2992  emit_jump (around_label);
2993
2994  emit_label (cfun->eh->ehr_label);
2995  clobber_return_register ();
2996
2997#ifdef EH_RETURN_STACKADJ_RTX
2998  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2999#endif
3000
3001#ifdef HAVE_eh_return
3002  if (HAVE_eh_return)
3003    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3004  else
3005#endif
3006    {
3007#ifdef EH_RETURN_HANDLER_RTX
3008      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3009#else
3010      error ("__builtin_eh_return not supported on this target");
3011#endif
3012    }
3013
3014  emit_label (around_label);
3015}
3016
3017/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3018   POINTERS_EXTEND_UNSIGNED and return it.  */
3019
3020rtx
3021expand_builtin_extend_pointer (tree addr_tree)
3022{
3023  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3024  int extend;
3025
3026#ifdef POINTERS_EXTEND_UNSIGNED
3027  extend = POINTERS_EXTEND_UNSIGNED;
3028#else
3029  /* The previous EH code did an unsigned extend by default, so we do the
3030     same here for consistency.  */
3031  extend = 1;
3032#endif
3033
3034  return convert_modes (word_mode, ptr_mode, addr, extend);
3035}
3036
3037/* In the following functions, we represent entries in the action table
3038   as 1-based indices.  Special cases are:
3039
3040	 0:	null action record, non-null landing pad; implies cleanups
3041	-1:	null action record, null landing pad; implies no action
3042	-2:	no call-site entry; implies must_not_throw
3043	-3:	we have yet to process outer regions
3044
3045   Further, no special cases apply to the "next" field of the record.
3046   For next, 0 means end of list.  */
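
/* For example (an illustrative walk through collect_one_action_chain
   below): for a call inside a try block having two catch clauses with
   filters 1 and 2, all nested inside a cleanup, three records are built:

	record A: filter 0, next 0	(the cleanup; 0 ends the chain)
	record B: filter 2, next -> A
	record C: filter 1, next -> B

   and the call site stores record C's 1-based offset as its action.  */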
3047
3048struct action_record
3049{
3050  int offset;
3051  int filter;
3052  int next;
3053};
3054
3055static int
3056action_record_eq (const void *pentry, const void *pdata)
3057{
3058  const struct action_record *entry = (const struct action_record *) pentry;
3059  const struct action_record *data = (const struct action_record *) pdata;
3060  return entry->filter == data->filter && entry->next == data->next;
3061}
3062
3063static hashval_t
3064action_record_hash (const void *pentry)
3065{
3066  const struct action_record *entry = (const struct action_record *) pentry;
3067  return entry->next * 1009 + entry->filter;
3068}
3069
3070static int
3071add_action_record (htab_t ar_hash, int filter, int next)
3072{
3073  struct action_record **slot, *new, tmp;
3074
3075  tmp.filter = filter;
3076  tmp.next = next;
3077  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3078
3079  if ((new = *slot) == NULL)
3080    {
3081      new = xmalloc (sizeof (*new));
3082      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3083      new->filter = filter;
3084      new->next = next;
3085      *slot = new;
3086
3087      /* The filter value goes in untouched.  The link to the next
3088	 record is a "self-relative" byte offset, or zero to indicate
3089	 that there is no next record.  So convert the absolute 1-based
3090	 indices we've been carrying around into a displacement.  */
3091
3092      push_sleb128 (&cfun->eh->action_record_data, filter);
3093      if (next)
3094	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3095      push_sleb128 (&cfun->eh->action_record_data, next);
3096    }
3097
3098  return new->offset;
3099}
3100
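/* Compute the chain of action records for a call occurring within
   REGION, entering new records into AR_HASH as needed.  Returns the
   1-based offset of the head of the chain, or one of the non-positive
   special values described above.  */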
3101static int
3102collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3103{
3104  struct eh_region *c;
3105  int next;
3106
3107  /* If we've reached the top of the region chain, then we have
3108     no actions, and require no landing pad.  */
3109  if (region == NULL)
3110    return -1;
3111
3112  switch (region->type)
3113    {
3114    case ERT_CLEANUP:
3115      /* A cleanup adds a zero filter to the beginning of the chain, but
3116	 there are special cases to look out for.  If there are *only*
3117	 cleanups along a path, then it compresses to a zero action.
3118	 Further, if there are multiple cleanups along a path, we only
3119	 need to represent one of them, as that is enough to trigger
3120	 entry to the landing pad at runtime.  */
3121      next = collect_one_action_chain (ar_hash, region->outer);
3122      if (next <= 0)
3123	return 0;
3124      for (c = region->outer; c ; c = c->outer)
3125	if (c->type == ERT_CLEANUP)
3126	  return next;
3127      return add_action_record (ar_hash, 0, next);
3128
3129    case ERT_TRY:
3130      /* Process the associated catch regions in reverse order.
3131	 If there's a catch-all handler, then we don't need to
3132	 search outer regions.  Use a magic -3 value to record
3133	 that we haven't done the outer search.  */
3134      next = -3;
3135      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3136	{
3137	  if (c->u.catch.type_list == NULL)
3138	    {
3139	      /* Retrieve the filter from the head of the filter list
3140		 where we have stored it (see assign_filter_values).  */
3141	      int filter
3142		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3143
3144	      next = add_action_record (ar_hash, filter, 0);
3145	    }
3146	  else
3147	    {
3148	      /* Once the outer search is done, trigger an action record for
3149                 each filter we have.  */
3150	      tree flt_node;
3151
3152	      if (next == -3)
3153		{
3154		  next = collect_one_action_chain (ar_hash, region->outer);
3155
3156		  /* If there is no next action, terminate the chain.  */
3157		  if (next == -1)
3158		    next = 0;
3159		  /* If all outer actions are cleanups or must_not_throw,
3160		     we'll have no action record for it, since we want
3161		     to encode these states in the call-site record directly.
3162		     Add a cleanup action to the chain to catch these.  */
3163		  else if (next <= 0)
3164		    next = add_action_record (ar_hash, 0, 0);
3165		}
3166
3167	      flt_node = c->u.catch.filter_list;
3168	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3169		{
3170		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3171		  next = add_action_record (ar_hash, filter, next);
3172		}
3173	    }
3174	}
3175      return next;
3176
3177    case ERT_ALLOWED_EXCEPTIONS:
3178      /* An exception specification adds its filter to the
3179	 beginning of the chain.  */
3180      next = collect_one_action_chain (ar_hash, region->outer);
3181
3182      /* If there is no next action, terminate the chain.  */
3183      if (next == -1)
3184	next = 0;
3185      /* If all outer actions are cleanups or must_not_throw,
3186	 we'll have no action record for it, since we want
3187	 to encode these states in the call-site record directly.
3188	 Add a cleanup action to the chain to catch these.  */
3189      else if (next <= 0)
3190	next = add_action_record (ar_hash, 0, 0);
3191
3192      return add_action_record (ar_hash, region->u.allowed.filter, next);
3193
3194    case ERT_MUST_NOT_THROW:
3195      /* A must-not-throw region with no inner handlers or cleanups
3196	 requires no call-site entry.  Note that this differs from
3197	 the no handler or cleanup case in that we do require an lsda
3198	 to be generated.  Return a magic -2 value to record this.  */
3199      return -2;
3200
3201    case ERT_CATCH:
3202    case ERT_THROW:
3203      /* CATCH regions are handled in TRY above.  THROW regions are
3204	 for optimization information only and produce no output.  */
3205      return collect_one_action_chain (ar_hash, region->outer);
3206
3207    default:
3208      gcc_unreachable ();
3209    }
3210}
3211
3212static int
3213add_call_site (rtx landing_pad, int action)
3214{
3215  struct call_site_record *data = cfun->eh->call_site_data;
3216  int used = cfun->eh->call_site_data_used;
3217  int size = cfun->eh->call_site_data_size;
3218
3219  if (used >= size)
3220    {
3221      size = (size ? size * 2 : 64);
3222      data = ggc_realloc (data, sizeof (*data) * size);
3223      cfun->eh->call_site_data = data;
3224      cfun->eh->call_site_data_size = size;
3225    }
3226
3227  data[used].landing_pad = landing_pad;
3228  data[used].action = action;
3229
3230  cfun->eh->call_site_data_used = used + 1;
3231
3232  return used + call_site_base;
3233}
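
/* The returned index is biased by call_site_base.  For sjlj that base is
   set to 1 per function (index 0 is reserved for must-not-throw; see
   sjlj_assign_call_site_values).  For dwarf2 the output routines below
   advance call_site_base by each table's size, keeping the LEHB/LEHE
   label numbers unique across the translation unit.  */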
3234
3235/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3236   The new note numbers will not refer to region numbers, but
3237   instead to call site entries.  */
3238
3239unsigned int
3240convert_to_eh_region_ranges (void)
3241{
3242  rtx insn, iter, note;
3243  htab_t ar_hash;
3244  int last_action = -3;
3245  rtx last_action_insn = NULL_RTX;
3246  rtx last_landing_pad = NULL_RTX;
3247  rtx first_no_action_insn = NULL_RTX;
3248  int call_site = 0;
3249
3250  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3251    return 0;
3252
3253  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3254
3255  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3256
3257  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3258    if (INSN_P (iter))
3259      {
3260	struct eh_region *region;
3261	int this_action;
3262	rtx this_landing_pad;
3263
3264	insn = iter;
3265	if (NONJUMP_INSN_P (insn)
3266	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3267	  insn = XVECEXP (PATTERN (insn), 0, 0);
3268
3269	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3270	if (!note)
3271	  {
3272	    if (! (CALL_P (insn)
3273		   || (flag_non_call_exceptions
3274		       && may_trap_p (PATTERN (insn)))))
3275	      continue;
3276	    this_action = -1;
3277	    region = NULL;
3278	  }
3279	else
3280	  {
3281	    if (INTVAL (XEXP (note, 0)) <= 0)
3282	      continue;
3283	    region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3284	    this_action = collect_one_action_chain (ar_hash, region);
3285	  }
3286
3287	/* The existence of catch handlers or must-not-throw regions
3288	   implies that an lsda is needed (even if empty).  */
3289	if (this_action != -1)
3290	  cfun->uses_eh_lsda = 1;
3291
3292	/* Delay creation of region notes for no-action regions
3293	   until we're sure that an lsda will be required.  */
3294	else if (last_action == -3)
3295	  {
3296	    first_no_action_insn = iter;
3297	    last_action = -1;
3298	  }
3299
3300	/* Cleanups and handlers may share action chains but not
3301	   landing pads.  Collect the landing pad for this region.  */
3302	if (this_action >= 0)
3303	  {
3304	    struct eh_region *o;
3305	    for (o = region; ! o->landing_pad ; o = o->outer)
3306	      continue;
3307	    this_landing_pad = o->landing_pad;
3308	  }
3309	else
3310	  this_landing_pad = NULL_RTX;
3311
3312	/* Differing actions or landing pads implies a change in call-site
3313	   info, which implies some EH_REGION note should be emitted.  */
3314	if (last_action != this_action
3315	    || last_landing_pad != this_landing_pad)
3316	  {
3317	    /* If we'd not seen a previous action (-3) or the previous
3318	       action was must-not-throw (-2), then we do not need an
3319	       end note.  */
3320	    if (last_action >= -1)
3321	      {
3322		/* If we delayed the creation of the begin, do it now.  */
3323		if (first_no_action_insn)
3324		  {
3325		    call_site = add_call_site (NULL_RTX, 0);
3326		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3327					     first_no_action_insn);
3328		    NOTE_EH_HANDLER (note) = call_site;
3329		    first_no_action_insn = NULL_RTX;
3330		  }
3331
3332		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3333					last_action_insn);
3334		NOTE_EH_HANDLER (note) = call_site;
3335	      }
3336
3337	    /* If the new action is must-not-throw, then no region notes
3338	       are created.  */
3339	    if (this_action >= -1)
3340	      {
3341		call_site = add_call_site (this_landing_pad,
3342					   this_action < 0 ? 0 : this_action);
3343		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3344		NOTE_EH_HANDLER (note) = call_site;
3345	      }
3346
3347	    last_action = this_action;
3348	    last_landing_pad = this_landing_pad;
3349	  }
3350	last_action_insn = iter;
3351      }
3352
3353  if (last_action >= -1 && ! first_no_action_insn)
3354    {
3355      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3356      NOTE_EH_HANDLER (note) = call_site;
3357    }
3358
3359  htab_delete (ar_hash);
3360  return 0;
3361}
3362
3363struct tree_opt_pass pass_convert_to_eh_region_ranges =
3364{
3365  "eh-ranges",                          /* name */
3366  NULL,                                 /* gate */
3367  convert_to_eh_region_ranges,          /* execute */
3368  NULL,                                 /* sub */
3369  NULL,                                 /* next */
3370  0,                                    /* static_pass_number */
3371  0,                                    /* tv_id */
3372  0,                                    /* properties_required */
3373  0,                                    /* properties_provided */
3374  0,                                    /* properties_destroyed */
3375  0,                                    /* todo_flags_start */
3376  TODO_dump_func,			/* todo_flags_finish */
3377  0                                     /* letter */
3378};
3379
3380
3381static void
3382push_uleb128 (varray_type *data_area, unsigned int value)
3383{
3384  do
3385    {
3386      unsigned char byte = value & 0x7f;
3387      value >>= 7;
3388      if (value)
3389	byte |= 0x80;
3390      VARRAY_PUSH_UCHAR (*data_area, byte);
3391    }
3392  while (value);
3393}
3394
3395static void
3396push_sleb128 (varray_type *data_area, int value)
3397{
3398  unsigned char byte;
3399  int more;
3400
3401  do
3402    {
3403      byte = value & 0x7f;
3404      value >>= 7;
3405      more = ! ((value == 0 && (byte & 0x40) == 0)
3406		|| (value == -1 && (byte & 0x40) != 0));
3407      if (more)
3408	byte |= 0x80;
3409      VARRAY_PUSH_UCHAR (*data_area, byte);
3410    }
3411  while (more);
3412}
3413
3414
3415#ifndef HAVE_AS_LEB128
3416static int
3417dw2_size_of_call_site_table (void)
3418{
3419  int n = cfun->eh->call_site_data_used;
3420  int size = n * (4 + 4 + 4);
3421  int i;
3422
3423  for (i = 0; i < n; ++i)
3424    {
3425      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3426      size += size_of_uleb128 (cs->action);
3427    }
3428
3429  return size;
3430}
3431
3432static int
3433sjlj_size_of_call_site_table (void)
3434{
3435  int n = cfun->eh->call_site_data_used;
3436  int size = 0;
3437  int i;
3438
3439  for (i = 0; i < n; ++i)
3440    {
3441      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3442      size += size_of_uleb128 (INTVAL (cs->landing_pad));
3443      size += size_of_uleb128 (cs->action);
3444    }
3445
3446  return size;
3447}
3448#endif
3449
3450static void
3451dw2_output_call_site_table (void)
3452{
3453  int n = cfun->eh->call_site_data_used;
3454  int i;
3455
3456  for (i = 0; i < n; ++i)
3457    {
3458      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3459      char reg_start_lab[32];
3460      char reg_end_lab[32];
3461      char landing_pad_lab[32];
3462
3463      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3464      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3465
3466      if (cs->landing_pad)
3467	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3468				     CODE_LABEL_NUMBER (cs->landing_pad));
3469
3470      /* ??? Perhaps use insn length scaling if the assembler supports
3471	 generic arithmetic.  */
3472      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3473	 data4 if the function is small enough.  */
3474#ifdef HAVE_AS_LEB128
3475      dw2_asm_output_delta_uleb128 (reg_start_lab,
3476				    current_function_func_begin_label,
3477				    "region %d start", i);
3478      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3479				    "length");
3480      if (cs->landing_pad)
3481	dw2_asm_output_delta_uleb128 (landing_pad_lab,
3482				      current_function_func_begin_label,
3483				      "landing pad");
3484      else
3485	dw2_asm_output_data_uleb128 (0, "landing pad");
3486#else
3487      dw2_asm_output_delta (4, reg_start_lab,
3488			    current_function_func_begin_label,
3489			    "region %d start", i);
3490      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3491      if (cs->landing_pad)
3492	dw2_asm_output_delta (4, landing_pad_lab,
3493			      current_function_func_begin_label,
3494			      "landing pad");
3495      else
3496	dw2_asm_output_data (4, 0, "landing pad");
3497#endif
3498      dw2_asm_output_data_uleb128 (cs->action, "action");
3499    }
3500
3501  call_site_base += n;
3502}
3503
3504static void
3505sjlj_output_call_site_table (void)
3506{
3507  int n = cfun->eh->call_site_data_used;
3508  int i;
3509
3510  for (i = 0; i < n; ++i)
3511    {
3512      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3513
3514      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3515				   "region %d landing pad", i);
3516      dw2_asm_output_data_uleb128 (cs->action, "action");
3517    }
3518
3519  call_site_base += n;
3520}
3521
3522#ifndef TARGET_UNWIND_INFO
3523/* Switch to the section that should be used for exception tables.  */
3524
3525static void
3526switch_to_exception_section (void)
3527{
3528  if (exception_section == 0)
3529    {
3530      if (targetm.have_named_sections)
3531	{
3532	  int flags;
3533
3534	  if (EH_TABLES_CAN_BE_READ_ONLY)
3535	    {
3536	      int tt_format =
3537		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3538	      flags = ((! flag_pic
3539			|| ((tt_format & 0x70) != DW_EH_PE_absptr
3540			    && (tt_format & 0x70) != DW_EH_PE_aligned))
3541		       ? 0 : SECTION_WRITE);
3542	    }
3543	  else
3544	    flags = SECTION_WRITE;
3545	  exception_section = get_section (".gcc_except_table", flags, NULL);
3546	}
3547      else
3548	exception_section = flag_pic ? data_section : readonly_data_section;
3549    }
3550  switch_to_section (exception_section);
3551}
3552#endif
3553
3554
3555/* Output a reference from an exception table to the type_info object TYPE.
3556   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3557   the value.  */
3558
3559static void
3560output_ttype (tree type, int tt_format, int tt_format_size)
3561{
3562  rtx value;
3563  bool public = true;
3564
3565  if (type == NULL_TREE)
3566    value = const0_rtx;
3567  else
3568    {
3569      struct cgraph_varpool_node *node;
3570
3571      type = lookup_type_for_runtime (type);
3572      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3573
3574      /* Let cgraph know that the rtti decl is used.  Not all of the
3575	 paths below go through assemble_integer, which would take
3576	 care of this for us.  */
3577      STRIP_NOPS (type);
3578      if (TREE_CODE (type) == ADDR_EXPR)
3579	{
3580	  type = TREE_OPERAND (type, 0);
3581	  if (TREE_CODE (type) == VAR_DECL)
3582	    {
3583	      node = cgraph_varpool_node (type);
3584	      if (node)
3585		cgraph_varpool_mark_needed_node (node);
3586	      public = TREE_PUBLIC (type);
3587	    }
3588	}
3589      else
3590	gcc_assert (TREE_CODE (type) == INTEGER_CST);
3591    }
3592
3593  /* Allow the target to override the type table entry format.  */
3594  if (targetm.asm_out.ttype (value))
3595    return;
3596
3597  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3598    assemble_integer (value, tt_format_size,
3599		      tt_format_size * BITS_PER_UNIT, 1);
3600  else
3601    dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
3602}

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section ();
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A uleb128 displacement from the end of this field to the base of
     the @TType data; the @TType entries are indexed backward from it.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates things: the size of the uleb128
	 displacement depends on the padding before the @TType data,
	 and the padding depends on the size of the displacement, so
	 iterate until we reach a fixed point.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      /* The @LPStart and @TType format bytes precede the displacement.  */
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VEC_length (tree, cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
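
  /* A worked example of the fixed-point iteration above, with assumed
     numbers (not taken from any real function): let tt_format_size be 4,
     before_disp 2, and after_disp 126.
	disp = 126: size_of_uleb128 (126) = 1; 2 + 1 + 126 = 129;
		    129 % 4 = 1, so pad = 3 and disp becomes 129.
	disp = 129: size_of_uleb128 (129) = 2; 2 + 2 + 126 = 130;
		    130 % 4 = 2, so pad = 2 and disp becomes 128.
	disp = 128: size_of_uleb128 (128) = 2; the sums repeat, disp
		    stays 128, and the loop terminates.
     The emitted offset is 128, and crossing the one-byte uleb128 limit
     (127) is exactly what can change the padding between rounds.  */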

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}
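
/* For reference, a sketch (not normative) of the LSDA layout emitted by
   output_function_exception_table:

	byte	 @LPStart format (always DW_EH_PE_omit here)
	byte	 @TType format
	uleb128	 @TType base offset	(only if @TType entries exist)
	byte	 call-site format
	uleb128	 call-site table length
		 call-site table
		 action record table
		 padding to a multiple of tt_format_size
		 @TType entries, emitted in reverse order
		 exception specification table

   On ARM EABI targets the exception specification bytes are replaced by
   @TType references, as in the final loop above.  */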

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
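
/* The throw statement table maps statements to EH region numbers; it is
   created and filled in by the tree-level EH lowering code (see struct
   throw_stmt_node in except.h).  These accessors exist so that callers
   that only see a struct function need not know the eh_status layout.  */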

/* Dump EH information to OUT.  */
void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
					   "allowed_exceptions", "must_not_throw",
					   "throw"};

  i = fun->eh->region_tree;
  if (! i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
	{
	  fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    depth--;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
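
/* For illustration only: for a try/catch nested inside a cleanup, the
   dump above might look along these lines (the region numbers and the
   label are hypothetical):

	Eh tree:
	   1 cleanup
	     2 try
	       3 catch tree_label:<L5>
   */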

/* Verify some basic invariants on EH data structures.  Could be extended
   to catch more.  */
void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  i = fun->eh->region_tree;
  if (! i)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	count++;
	if (i->region_number != j)
	  {
	    error ("region_array is corrupted for region %i", i->region_number);
	    err = true;
	  }
      }

  /* Restart at the root of the region tree; the counting loop above
     left I pointing at an arbitrary array slot.  */
  i = fun->eh->region_tree;
  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error ("region %i may contain throw and is contained in region that may not",
		 i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    depth--;
	    if (i == NULL)
	      {
		if (depth != -1)
		  {
		    error ("tree list ends on depth %i", depth + 1);
		    err = true;
		  }
		if (count != nvisited)
		  {
		    error ("array does not match the region tree");
		    err = true;
		  }
		if (err)
		  {
		    dump_eh_tree (stderr, fun);
		    internal_error ("verify_eh_tree failed");
		  }
		return;
	      }
	    outer = i->outer;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
					    : "_Unwind_Resume");
}
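
/* Both entry points are provided by the unwind runtime in libgcc (the
   sjlj and DWARF-2 unwinders export _Unwind_SjLj_Resume and
   _Unwind_Resume respectively), not by any language frontend or its
   runtime library.  */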


static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

struct tree_opt_pass pass_rtl_eh =
{
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'h'                                   /* letter */
};

#include "gt-except.h"
