except.c revision 132718
/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is an error that occurs
   while constructing an object; without such a mechanism, it is
   impossible to signal that the error has occurred without adding
   global state variables and error checks around every object
   construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
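
/* As an illustrative sketch (hypothetical C++ source, not from this
   file): for

	try {
	  T t;		   // T has a destructor
	  may_throw ();
	} catch (Error &e) {
	  handle (e);
	}

   the tree would contain an ERT_TRY region for the try block, with an
   ERT_CLEANUP region for t's destructor among its inner children; the
   ERT_CATCH region for the Error clause is a peer of the try region,
   linked to it through u.try.catch and u.catch.prev_catch.  */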

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};


static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
				   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);


/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
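
  /* For reference, an approximate sketch of the C structure being
     described here, as laid out by the field definitions below.  The
     authoritative definition lives in unwind-sjlj.c, and the exact
     field types are target-dependent:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  _Unwind_Word __data[4];
	  void *__personality;
	  void *__lsda;
	  void *__jbuf[];
	};  */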
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
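
/* Illustrative usage (a hypothetical front-end sequence, not code
   from this file): to protect the expansion of STMT with a cleanup
   expression CLEANUP, a front end would emit

	expand_eh_region_start ();
	expand_expr_stmt (stmt);
	expand_eh_region_end_cleanup (cleanup);

   Each start pushes a fresh leaf region and makes it
   cfun->eh->cur_region; the matching end (below) pops back to the
   enclosing region.  */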

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE_OR_LIST is the type caught, a list of
   such types, or null if this is a catch-all clause.  Providing a type
   list makes it possible to associate the catch region with several
   exception types, which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Make sure we always end up with a type list, to normalize
         further processing; then register each type against the
         runtime types map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the
   immediately enclosing region are _not_ run.  This is used for goto
   cleanups, to avoid destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  That is because expand_cleanups is currently generating a
   sequence that it will insert somewhere else.  We collect the proper
   notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

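/* Note for readers: exc_ptr and filter are the pseudos into which
   dw2_build_landing_pads (below) copies the runtime's
   EH_RETURN_DATA_REGNO (0) and EH_RETURN_DATA_REGNO (1) registers,
   respectively, at each landing pad.  */
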
850
851/* Random access the exception region tree.  It's just as simple to
852   collect the regions this way as in expand_eh_region_start, but
853   without having to realloc memory.  */
854

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);
      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}

static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     it doesn't hurt to do it for all regions.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof (int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof (bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      reachable[uid_region_num[INSN_UID (insn)]] = true;

      if (GET_CODE (insn) == CALL_INSN
	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	for (i = 0; i < 3; i++)
	  {
	    rtx sub = XEXP (PATTERN (insn), i);
	    for (; sub ; sub = NEXT_INSN (sub))
	      reachable[uid_region_num[INSN_UID (sub)]] = true;
	  }
    }

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less,
		 which so far should be the only possible kind.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
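
/* An illustrative example of the conversion above (hypothetical
   region numbers): given the insn stream

	NOTE_INSN_EH_REGION_BEG 1
	  call A
	  NOTE_INSN_EH_REGION_BEG 2
	    call B
	  NOTE_INSN_EH_REGION_END 2
	NOTE_INSN_EH_REGION_END 1

   the notes are removed, call A gets a REG_EH_REGION note of 1 and
   call B one of 2; that is, each potentially-throwing insn is tagged
   with its innermost enclosing region number.  */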

void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}

static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
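
/* For example (illustrative numbers): if the current function's
   last_region_number is 5 and IFUN contains regions numbered 1..3,
   the copies above are renumbered 6..8, last_region_number becomes 8,
   and the returned value 5 is the bias callers apply to region
   references copied from IFUN's body.  */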


static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should always have inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TTYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
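
/* For reference, uleb128 is the DWARF variable-length encoding: seven
   data bits per byte, least significant first, with the high bit set
   on every byte except the last.  A minimal sketch of the encoding
   loop (push_uleb128 itself is defined later in this file):

	do
	  {
	    unsigned char byte = value & 0x7f;
	    value >>= 7;
	    if (value)
	      byte |= 0x80;
	    VARRAY_PUSH_UCHAR (*data_area, byte);
	  }
	while (value);

   For example, the value 300 (0x12c) is emitted as the two bytes
   0xac 0x02.  */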

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
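
/* A worked example with hypothetical types: for

	try { ... } catch (A) { ... } catch (B) { ... }

   add_ttypes_entry would assign A filter 1 and B filter 2 (1-based
   indices into ttype_data), so each catch's u.catch.filter_list holds
   a single INTEGER_CST.  An exception specification throw (A, B)
   instead receives one negative filter from add_ehspec_entry: minus
   the 1-based byte offset at which its uleb128-encoded type list
   starts within ehspec_data.  */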

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, region->resume);
      delete_insn (region->resume);
    }
}

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insn_before (seq, region->post_landing_pad);
    }
}
1876
1877
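/* Per-region bookkeeping used while building the sjlj dispatch code.
   DIRECTLY_REACHABLE is nonzero if some insn may transfer control to
   the region's handler.  ACTION_INDEX is the region's entry in the
   action record table.  DISPATCH_INDEX is the value tested by the
   common dispatch code, and CALL_SITE_INDEX is the value stored into
   the function context before each call.  */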
1878struct sjlj_lp_info
1879{
1880  int directly_reachable;
1881  int action_index;
1882  int dispatch_index;
1883  int call_site_index;
1884};
1885
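/* Scan the insn chain for REG_EH_REGION notes and mark each region whose
   handler may be entered directly by a throw.  Return true if at least
   one region is directly reachable.  */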
1886static bool
1887sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1888{
1889  rtx insn;
1890  bool found_one = false;
1891
1892  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1893    {
1894      struct eh_region *region;
1895      enum reachable_code rc;
1896      tree type_thrown;
1897      rtx note;
1898
1899      if (! INSN_P (insn))
1900	continue;
1901
1902      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1903      if (!note || INTVAL (XEXP (note, 0)) <= 0)
1904	continue;
1905
1906      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1907
1908      type_thrown = NULL_TREE;
1909      if (region->type == ERT_THROW)
1910	{
1911	  type_thrown = region->u.throw.type;
1912	  region = region->outer;
1913	}
1914
1915      /* Find the first containing region that might handle the exception.
1916	 That's the landing pad to which we will transfer control.  */
1917      rc = RNL_NOT_CAUGHT;
1918      for (; region; region = region->outer)
1919	{
1920	  rc = reachable_next_level (region, type_thrown, 0);
1921	  if (rc != RNL_NOT_CAUGHT)
1922	    break;
1923	}
1924      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1925	{
1926	  lp_info[region->region_number].directly_reachable = 1;
1927	  found_one = true;
1928	}
1929    }
1930
1931  return found_one;
1932}
1933
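/* Build the action record table and assign a dispatch index and a
   call-site index to every directly reachable region.  DISPATCH_LABEL
   is the shared landing pad from which the dispatch code will transfer
   control.  */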
1934static void
1935sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1936{
1937  htab_t ar_hash;
1938  int i, index;
1939
1940  /* First task: build the action table.  */
1941
1942  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1943  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1944
1945  for (i = cfun->eh->last_region_number; i > 0; --i)
1946    if (lp_info[i].directly_reachable)
1947      {
1948	struct eh_region *r = cfun->eh->region_array[i];
1949	r->landing_pad = dispatch_label;
1950	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1951	if (lp_info[i].action_index != -1)
1952	  cfun->uses_eh_lsda = 1;
1953      }
1954
1955  htab_delete (ar_hash);
1956
1957  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
1958     landing pad label for the region.  For sjlj though, there is one
1959     common landing pad from which we dispatch to the post-landing pads.
1960
1961     A region receives a dispatch index if it is directly reachable
1962     and requires in-function processing.  Regions that share post-landing
1963     pads may share dispatch indices.  */
1964  /* ??? Post-landing pad sharing doesn't actually happen at the moment
1965     (see build_post_landing_pads) so we don't bother checking for it.  */
1966
1967  index = 0;
1968  for (i = cfun->eh->last_region_number; i > 0; --i)
1969    if (lp_info[i].directly_reachable)
1970      lp_info[i].dispatch_index = index++;
1971
1972	  /* Finally: assign call-site values.  In dwarf2 terms, this would be
1973     the region number assigned by convert_to_eh_region_ranges, but
1974     handles no-action and must-not-throw differently.  */
1975
1976  call_site_base = 1;
1977  for (i = cfun->eh->last_region_number; i > 0; --i)
1978    if (lp_info[i].directly_reachable)
1979      {
1980	int action = lp_info[i].action_index;
1981
1982	/* Map must-not-throw to otherwise unused call-site index 0.  */
1983	if (action == -2)
1984	  index = 0;
1985	/* Map no-action to otherwise unused call-site index -1.  */
1986	else if (action == -1)
1987	  index = -1;
1988	/* Otherwise, look it up in the table.  */
1989	else
1990	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1991
1992	lp_info[i].call_site_index = index;
1993      }
1994}
1995
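/* Emit a store of the appropriate call-site index into the function
   context before every insn that can throw, so that the dispatch code
   can tell in which region an exception was raised.  */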
1996static void
1997sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1998{
1999  int last_call_site = -2;
2000  rtx insn, mem;
2001
2002  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2003    {
2004      struct eh_region *region;
2005      int this_call_site;
2006      rtx note, before, p;
2007
2008      /* Reset value tracking at extended basic block boundaries.  */
2009      if (GET_CODE (insn) == CODE_LABEL)
2010	last_call_site = -2;
2011
2012      if (! INSN_P (insn))
2013	continue;
2014
2015      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2016      if (!note)
2017	{
2018	  /* Calls (and trapping insns) without notes are outside any
2019	     exception handling region in this function.  Mark them as
2020	     no action.  */
2021	  if (GET_CODE (insn) == CALL_INSN
2022	      || (flag_non_call_exceptions
2023		  && may_trap_p (PATTERN (insn))))
2024	    this_call_site = -1;
2025	  else
2026	    continue;
2027	}
2028      else
2029	{
2030	  /* Calls that are known not to throw need not be marked.  */
2031	  if (INTVAL (XEXP (note, 0)) <= 0)
2032	    continue;
2033
2034	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2035	  this_call_site = lp_info[region->region_number].call_site_index;
2036	}
2037
2038      if (this_call_site == last_call_site)
2039	continue;
2040
2041	      /* Don't separate a call from its argument loads.  */
2042      before = insn;
2043      if (GET_CODE (insn) == CALL_INSN)
2044	before = find_first_parameter_load (insn, NULL_RTX);
2045
2046      start_sequence ();
2047      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2048			    sjlj_fc_call_site_ofs);
2049      emit_move_insn (mem, GEN_INT (this_call_site));
2050      p = get_insns ();
2051      end_sequence ();
2052
2053      emit_insn_before (p, before);
2054      last_call_site = this_call_site;
2055    }
2056}
2057
2058/* Construct the SjLj_Function_Context.  */
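/* Schematically, and glossing over the DONT_USE_BUILTIN_SETJMP
   variation, the code emitted at function entry amounts to:

	fc.personality = PERSONALITY_ROUTINE;
	fc.lsda = &LLSDAxxx;	(or 0 if there is no lsda)
	if (__builtin_setjmp (fc.jbuf))
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);

   This is only a sketch; the actual sequence is emitted as RTL after
   the NOTE_INSN_FUNCTION_BEG note.  */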
2059
2060static void
2061sjlj_emit_function_enter (rtx dispatch_label)
2062{
2063  rtx fn_begin, fc, mem, seq;
2064
2065  fc = cfun->eh->sjlj_fc;
2066
2067  start_sequence ();
2068
2069  /* We're storing this libcall's address into memory instead of
2070     calling it directly.  Thus, we must call assemble_external_libcall
2071	     here, as we cannot depend on emit_library_call to do it for us.  */
2072  assemble_external_libcall (eh_personality_libfunc);
2073  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2074  emit_move_insn (mem, eh_personality_libfunc);
2075
2076  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2077  if (cfun->uses_eh_lsda)
2078    {
2079      char buf[20];
2080      rtx sym;
2081
2082      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2083      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2084      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2085      emit_move_insn (mem, sym);
2086    }
2087  else
2088    emit_move_insn (mem, const0_rtx);
2089
2090#ifdef DONT_USE_BUILTIN_SETJMP
2091  {
2092    rtx x, note;
2093    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2094				 TYPE_MODE (integer_type_node), 1,
2095				 plus_constant (XEXP (fc, 0),
2096						sjlj_fc_jbuf_ofs), Pmode);
2097
2098    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
2099    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2100
2101    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2102			     TYPE_MODE (integer_type_node), 0, dispatch_label);
2103  }
2104#else
2105  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2106			       dispatch_label);
2107#endif
2108
2109  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2110		     1, XEXP (fc, 0), Pmode);
2111
2112  seq = get_insns ();
2113  end_sequence ();
2114
2115  /* ??? Instead of doing this at the beginning of the function,
2116     do this in a block that is at loop level 0 and dominates all
2117     can_throw_internal instructions.  */
2118
2119  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2120    if (GET_CODE (fn_begin) == NOTE
2121	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2122      break;
2123  emit_insn_after (seq, fn_begin);
2124}
2125
2126/* Call back from expand_function_end to know where we should put
2127   the call to unwind_sjlj_unregister_libfunc if needed.  */
2128
2129void
2130sjlj_emit_function_exit_after (rtx after)
2131{
2132  cfun->eh->sjlj_exit_after = after;
2133}
2134
2135static void
2136sjlj_emit_function_exit (void)
2137{
2138  rtx seq;
2139
2140  start_sequence ();
2141
2142  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2143		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2144
2145  seq = get_insns ();
2146  end_sequence ();
2147
2148  /* ??? Really this can be done in any block at loop level 0 that
2149     post-dominates all can_throw_internal instructions.  This is
2150     the last possible moment.  */
2151
2152  emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2153}
2154
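/* Emit the common landing pad.  The dispatch code reads the call-site
   index, exception pointer and filter value back out of the function
   context, then compares the index against each directly reachable
   region in turn, branching to the matching post-landing pad; the
   first reachable region is reached by fall-through.  */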
2155static void
2156sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2157{
2158  int i, first_reachable;
2159  rtx mem, dispatch, seq, fc;
2160
2161  fc = cfun->eh->sjlj_fc;
2162
2163  start_sequence ();
2164
2165  emit_label (dispatch_label);
2166
2167#ifndef DONT_USE_BUILTIN_SETJMP
2168  expand_builtin_setjmp_receiver (dispatch_label);
2169#endif
2170
2171  /* Load up dispatch index, exc_ptr and filter values from the
2172     function context.  */
2173  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2174			sjlj_fc_call_site_ofs);
2175  dispatch = copy_to_reg (mem);
2176
2177  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2178  if (word_mode != ptr_mode)
2179    {
2180#ifdef POINTERS_EXTEND_UNSIGNED
2181      mem = convert_memory_address (ptr_mode, mem);
2182#else
2183      mem = convert_to_mode (ptr_mode, mem, 0);
2184#endif
2185    }
2186  emit_move_insn (cfun->eh->exc_ptr, mem);
2187
2188  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2189  emit_move_insn (cfun->eh->filter, mem);
2190
2191  /* Jump to one of the directly reachable regions.  */
2192  /* ??? This really ought to be using a switch statement.  */
2193
2194  first_reachable = 0;
2195  for (i = cfun->eh->last_region_number; i > 0; --i)
2196    {
2197      if (! lp_info[i].directly_reachable)
2198	continue;
2199
2200      if (! first_reachable)
2201	{
2202	  first_reachable = i;
2203	  continue;
2204	}
2205
2206      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2207			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2208			       cfun->eh->region_array[i]->post_landing_pad);
2209    }
2210
2211  seq = get_insns ();
2212  end_sequence ();
2213
2214  emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2215			  ->post_landing_pad));
2216}
2217
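/* Top-level driver for sjlj exception handling: allocate the function
   context, mark the call sites, and emit the entry, dispatch and exit
   code.  Does nothing if no region is directly reachable.  */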
2218static void
2219sjlj_build_landing_pads (void)
2220{
2221  struct sjlj_lp_info *lp_info;
2222
2223  lp_info = xcalloc (cfun->eh->last_region_number + 1,
2224		     sizeof (struct sjlj_lp_info));
2225
2226  if (sjlj_find_directly_reachable_regions (lp_info))
2227    {
2228      rtx dispatch_label = gen_label_rtx ();
2229
2230      cfun->eh->sjlj_fc
2231	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2232			      int_size_in_bytes (sjlj_fc_type_node),
2233			      TYPE_ALIGN (sjlj_fc_type_node));
2234
2235      sjlj_assign_call_site_values (dispatch_label, lp_info);
2236      sjlj_mark_call_sites (lp_info);
2237
2238      sjlj_emit_function_enter (dispatch_label);
2239      sjlj_emit_dispatch_table (dispatch_label, lp_info);
2240      sjlj_emit_function_exit ();
2241    }
2242
2243  free (lp_info);
2244}
2245
2246void
2247finish_eh_generation (void)
2248{
2249  /* Nothing to do if no regions created.  */
2250  if (cfun->eh->region_tree == NULL)
2251    return;
2252
2253  /* The object here is to provide find_basic_blocks with detailed
2254     information (via reachable_handlers) on how exception control
2255     flows within the function.  In this first pass, we can include
2256     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2257     regions, and hope that it will be useful in deleting unreachable
2258     handlers.  Subsequently, we will generate landing pads which will
2259     connect many of the handlers, and then type information will not
2260     be effective.  Still, this is a win over previous implementations.  */
2261
2262  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2263
2264  /* These registers are used by the landing pads.  Make sure they
2265     have been generated.  */
2266  get_exception_pointer (cfun);
2267  get_exception_filter (cfun);
2268
2269  /* Construct the landing pads.  */
2270
2271  assign_filter_values ();
2272  build_post_landing_pads ();
2273  connect_post_landing_pads ();
2274  if (USING_SJLJ_EXCEPTIONS)
2275    sjlj_build_landing_pads ();
2276  else
2277    dw2_build_landing_pads ();
2278
2279  cfun->eh->built_landing_pads = 1;
2280
2281  /* We've totally changed the CFG.  Start over.  */
2282  find_exception_handler_labels ();
2283  rebuild_jump_labels (get_insns ());
2284  find_basic_blocks (get_insns (), max_reg_num (), 0);
2285  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2286}
2287
2288static hashval_t
2289ehl_hash (const void *pentry)
2290{
2291  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2292
2293  /* 2^32 * ((sqrt(5) - 1) / 2) */
2294  const hashval_t scaled_golden_ratio = 0x9e3779b9;
2295  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2296}
2297
2298static int
2299ehl_eq (const void *pentry, const void *pdata)
2300{
2301  struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2302  struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2303
2304  return entry->label == data->label;
2305}
2306
2307/* This section handles removing dead code for flow.  */
2308
2309/* Remove LABEL from exception_handler_label_map.  */
2310
2311static void
2312remove_exception_handler_label (rtx label)
2313{
2314  struct ehl_map_entry **slot, tmp;
2315
2316  /* If exception_handler_label_map was not built yet,
2317     there is nothing to do.  */
2318  if (cfun->eh->exception_handler_label_map == NULL)
2319    return;
2320
2321  tmp.label = label;
2322  slot = (struct ehl_map_entry **)
2323    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2324  if (! slot)
2325    abort ();
2326
2327  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2328}
2329
2330	/* Splice REGION from the region tree, updating the region array, the
   aka bitmaps, the exception handler label map and the catch lists
   accordingly.  */
2331
2332static void
2333remove_eh_handler (struct eh_region *region)
2334{
2335  struct eh_region **pp, **pp_start, *p, *outer, *inner;
2336  rtx lab;
2337
2338  /* For the benefit of efficiently handling REG_EH_REGION notes,
2339     replace this region in the region array with its containing
2340     region.  Note that previous region deletions may result in
2341     multiple copies of this region in the array, so we have a
2342     list of alternate numbers by which we are known.  */
2343
2344  outer = region->outer;
2345  cfun->eh->region_array[region->region_number] = outer;
2346  if (region->aka)
2347    {
2348      int i;
2349      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2350	{ cfun->eh->region_array[i] = outer; });
2351    }
2352
2353  if (outer)
2354    {
2355      if (!outer->aka)
2356        outer->aka = BITMAP_GGC_ALLOC ();
2357      if (region->aka)
2358	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2359      bitmap_set_bit (outer->aka, region->region_number);
2360    }
2361
2362  if (cfun->eh->built_landing_pads)
2363    lab = region->landing_pad;
2364  else
2365    lab = region->label;
2366  if (lab)
2367    remove_exception_handler_label (lab);
2368
2369  if (outer)
2370    pp_start = &outer->inner;
2371  else
2372    pp_start = &cfun->eh->region_tree;
2373  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2374    continue;
2375  *pp = region->next_peer;
2376
2377  inner = region->inner;
2378  if (inner)
2379    {
2380      for (p = inner; p->next_peer ; p = p->next_peer)
2381	p->outer = outer;
2382      p->outer = outer;
2383
2384      p->next_peer = *pp_start;
2385      *pp_start = inner;
2386    }
2387
2388  if (region->type == ERT_CATCH)
2389    {
2390      struct eh_region *try, *next, *prev;
2391
2392      for (try = region->next_peer;
2393	   try->type == ERT_CATCH;
2394	   try = try->next_peer)
2395	continue;
2396      if (try->type != ERT_TRY)
2397	abort ();
2398
2399      next = region->u.catch.next_catch;
2400      prev = region->u.catch.prev_catch;
2401
2402      if (next)
2403	next->u.catch.prev_catch = prev;
2404      else
2405	try->u.try.last_catch = prev;
2406      if (prev)
2407	prev->u.catch.next_catch = next;
2408      else
2409	{
2410	  try->u.try.catch = next;
2411	  if (! next)
2412	    remove_eh_handler (try);
2413	}
2414    }
2415}
2416
2417/* LABEL heads a basic block that is about to be deleted.  If this
2418   label corresponds to an exception region, we may be able to
2419   delete the region.  */
2420
2421void
2422maybe_remove_eh_handler (rtx label)
2423{
2424  struct ehl_map_entry **slot, tmp;
2425  struct eh_region *region;
2426
2427  /* ??? After generating landing pads, it's not so simple to determine
2428     if the region data is completely unused.  One must examine the
2429     landing pad and the post landing pad, and whether an inner try block
2430     is referencing the catch handlers directly.  */
2431  if (cfun->eh->built_landing_pads)
2432    return;
2433
2434  tmp.label = label;
2435  slot = (struct ehl_map_entry **)
2436    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2437  if (! slot)
2438    return;
2439  region = (*slot)->region;
2440  if (! region)
2441    return;
2442
2443  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2444     because there is no path to the fallback call to terminate.
2445     But the region continues to affect call-site data until there
2446     are no more contained calls, which we don't see here.  */
2447  if (region->type == ERT_MUST_NOT_THROW)
2448    {
2449      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2450      region->label = NULL_RTX;
2451    }
2452  else
2453    remove_eh_handler (region);
2454}
2455
2456/* Invokes CALLBACK for every exception handler label.  Only used by old
2457   loop hackery; should not be used by new code.  */
2458
2459void
2460for_each_eh_label (void (*callback) (rtx))
2461{
2462  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2463		 (void *) &callback);
2464}
2465
2466static int
2467for_each_eh_label_1 (void **pentry, void *data)
2468{
2469  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2470  void (*callback) (rtx) = *(void (**) (rtx)) data;
2471
2472  (*callback) (entry->label);
2473  return 1;
2474}
2475
2476/* This section describes CFG exception edges for flow.  */
2477
2478/* For communicating between calls to reachable_next_level.  */
2479struct reachable_info GTY(())
2480{
2481  tree types_caught;
2482  tree types_allowed;
2483  rtx handlers;
2484};
2485
2486/* A subroutine of reachable_next_level.  Return true if TYPE, or a
2487   base class of TYPE, is in HANDLED.  */
2488
2489static int
2490check_handled (tree handled, tree type)
2491{
2492  tree t;
2493
2494  /* We can check for exact matches without front-end help.  */
2495  if (! lang_eh_type_covers)
2496    {
2497      for (t = handled; t ; t = TREE_CHAIN (t))
2498	if (TREE_VALUE (t) == type)
2499	  return 1;
2500    }
2501  else
2502    {
2503      for (t = handled; t ; t = TREE_CHAIN (t))
2504	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2505	  return 1;
2506    }
2507
2508  return 0;
2509}
2510
2511/* A subroutine of reachable_next_level.  If we are collecting a list
2512   of handlers, add one.  After landing pad generation, reference
2513   it instead of the handlers themselves.  Further, the handlers are
2514   all wired together, so by referencing one, we've got them all.
2515   Before landing pad generation we reference each handler individually.
2516
2517   LP_REGION contains the landing pad; REGION is the handler.  */
2518
2519static void
2520add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
2521{
2522  if (! info)
2523    return;
2524
2525  if (cfun->eh->built_landing_pads)
2526    {
2527      if (! info->handlers)
2528	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2529    }
2530  else
2531    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2532}
2533
2534/* Process one level of exception regions for reachability.
2535   If TYPE_THROWN is non-null, then it is the *exact* type being
2536   propagated.  If INFO is non-null, then collect handler labels
2537   and caught/allowed type information between invocations.  */
2538
2539static enum reachable_code
2540reachable_next_level (struct eh_region *region, tree type_thrown,
2541		      struct reachable_info *info)
2542{
2543  switch (region->type)
2544    {
2545    case ERT_CLEANUP:
2546      /* Before landing-pad generation, we model control flow
2547	 directly to the individual handlers.  In this way we can
2548	 see that catch handler types may shadow one another.  */
2549      add_reachable_handler (info, region, region);
2550      return RNL_MAYBE_CAUGHT;
2551
2552    case ERT_TRY:
2553      {
2554	struct eh_region *c;
2555	enum reachable_code ret = RNL_NOT_CAUGHT;
2556
2557	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2558	  {
2559	    /* A catch-all handler ends the search.  */
2560	    if (c->u.catch.type_list == NULL)
2561	      {
2562		add_reachable_handler (info, region, c);
2563		return RNL_CAUGHT;
2564	      }
2565
2566	    if (type_thrown)
2567	      {
2568		/* If we have at least one type match, end the search.  */
2569		tree tp_node = c->u.catch.type_list;
2570
2571		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2572		  {
2573		    tree type = TREE_VALUE (tp_node);
2574
2575		    if (type == type_thrown
2576			|| (lang_eh_type_covers
2577			    && (*lang_eh_type_covers) (type, type_thrown)))
2578		      {
2579			add_reachable_handler (info, region, c);
2580			return RNL_CAUGHT;
2581		      }
2582		  }
2583
2584		/* If we have definitive information of a match failure,
2585		   the catch won't trigger.  */
2586		if (lang_eh_type_covers)
2587		  return RNL_NOT_CAUGHT;
2588	      }
2589
2590	    /* At this point, we either don't know what type is thrown or
2591	       don't have front-end assistance to help deciding if it is
2592	       covered by one of the types in the list for this region.
2593
2594	       We'd then like to add this region to the list of reachable
2595	       handlers since it is indeed potentially reachable based on the
2596	       information we have.
2597
2598	       Actually, this handler is for sure not reachable if all the
2599	       types it matches have already been caught. That is, it is only
2600	       potentially reachable if at least one of the types it catches
2601	       has not been previously caught.  */
2602
2603	    if (! info)
2604	      ret = RNL_MAYBE_CAUGHT;
2605	    else
2606	      {
2607		tree tp_node = c->u.catch.type_list;
2608		bool maybe_reachable = false;
2609
2610		/* Compute the potential reachability of this handler and
2611		   update the list of types caught at the same time.  */
2612		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2613		  {
2614		    tree type = TREE_VALUE (tp_node);
2615
2616		    if (! check_handled (info->types_caught, type))
2617		      {
2618			info->types_caught
2619			  = tree_cons (NULL, type, info->types_caught);
2620
2621			maybe_reachable = true;
2622		      }
2623		  }
2624
2625		if (maybe_reachable)
2626		  {
2627		    add_reachable_handler (info, region, c);
2628
2629		    /* ??? If the catch type is a base class of every allowed
2630		       type, then we know we can stop the search.  */
2631		    ret = RNL_MAYBE_CAUGHT;
2632		  }
2633	      }
2634	  }
2635
2636	return ret;
2637      }
2638
2639    case ERT_ALLOWED_EXCEPTIONS:
2640      /* An empty list of types definitely ends the search.  */
2641      if (region->u.allowed.type_list == NULL_TREE)
2642	{
2643	  add_reachable_handler (info, region, region);
2644	  return RNL_CAUGHT;
2645	}
2646
2647      /* Collect a list of lists of allowed types for use in detecting
2648	 when a catch may be transformed into a catch-all.  */
2649      if (info)
2650	info->types_allowed = tree_cons (NULL_TREE,
2651					 region->u.allowed.type_list,
2652					 info->types_allowed);
2653
2654      /* If we have definitive information about the type hierarchy,
2655	 then we can tell if the thrown type will pass through the
2656	 filter.  */
2657      if (type_thrown && lang_eh_type_covers)
2658	{
2659	  if (check_handled (region->u.allowed.type_list, type_thrown))
2660	    return RNL_NOT_CAUGHT;
2661	  else
2662	    {
2663	      add_reachable_handler (info, region, region);
2664	      return RNL_CAUGHT;
2665	    }
2666	}
2667
2668      add_reachable_handler (info, region, region);
2669      return RNL_MAYBE_CAUGHT;
2670
2671    case ERT_CATCH:
2672      /* Catch regions are handled by their controlling try region.  */
2673      return RNL_NOT_CAUGHT;
2674
2675    case ERT_MUST_NOT_THROW:
2676      /* Here we end our search, since no exceptions may propagate.
2677	 If we've touched down at some landing pad previously, then the
2678	 explicit function call we generated may be used.  Otherwise
2679	 the call is made by the runtime.  */
2680      if (info && info->handlers)
2681	{
2682	  add_reachable_handler (info, region, region);
2683	  return RNL_CAUGHT;
2684	}
2685      else
2686	return RNL_BLOCKED;
2687
2688    case ERT_THROW:
2689    case ERT_FIXUP:
2690    case ERT_UNKNOWN:
2691      /* Shouldn't see these here.  */
2692      break;
2693    }
2694
2695  abort ();
2696}
2697
2698/* Retrieve a list of labels of exception handlers which can be
2699   reached by a given insn.  */
2700
2701rtx
2702reachable_handlers (rtx insn)
2703{
2704  struct reachable_info info;
2705  struct eh_region *region;
2706  tree type_thrown;
2707  int region_number;
2708
2709  if (GET_CODE (insn) == JUMP_INSN
2710      && GET_CODE (PATTERN (insn)) == RESX)
2711    region_number = XINT (PATTERN (insn), 0);
2712  else
2713    {
2714      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2715      if (!note || INTVAL (XEXP (note, 0)) <= 0)
2716	return NULL;
2717      region_number = INTVAL (XEXP (note, 0));
2718    }
2719
2720  memset (&info, 0, sizeof (info));
2721
2722  region = cfun->eh->region_array[region_number];
2723
2724  type_thrown = NULL_TREE;
2725  if (GET_CODE (insn) == JUMP_INSN
2726      && GET_CODE (PATTERN (insn)) == RESX)
2727    {
2728      /* A RESX leaves a region instead of entering it.  Thus the
2729	 region itself may have been deleted out from under us.  */
2730      if (region == NULL)
2731	return NULL;
2732      region = region->outer;
2733    }
2734  else if (region->type == ERT_THROW)
2735    {
2736      type_thrown = region->u.throw.type;
2737      region = region->outer;
2738    }
2739
2740  while (region)
2741    {
2742      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2743	break;
2744      /* If we have processed one cleanup, there is no point in
2745	 processing any more of them.  Each cleanup will have an edge
2746	 to the next outer cleanup region, so the flow graph will be
2747	 accurate.  */
2748      if (region->type == ERT_CLEANUP)
2749	region = region->u.cleanup.prev_try;
2750      else
2751	region = region->outer;
2752    }
2753
2754  return info.handlers;
2755}
2756
2757/* Determine if the given INSN can throw an exception that is caught
2758   within the function.  */
2759
2760bool
2761can_throw_internal (rtx insn)
2762{
2763  struct eh_region *region;
2764  tree type_thrown;
2765  rtx note;
2766
2767  if (! INSN_P (insn))
2768    return false;
2769
2770  if (GET_CODE (insn) == INSN
2771      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2772    insn = XVECEXP (PATTERN (insn), 0, 0);
2773
2774  if (GET_CODE (insn) == CALL_INSN
2775      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2776    {
2777      int i;
2778      for (i = 0; i < 3; ++i)
2779	{
2780	  rtx sub = XEXP (PATTERN (insn), i);
2781	  for (; sub ; sub = NEXT_INSN (sub))
2782	    if (can_throw_internal (sub))
2783	      return true;
2784	}
2785      return false;
2786    }
2787
2788  /* Every insn that might throw has an EH_REGION note.  */
2789  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2790  if (!note || INTVAL (XEXP (note, 0)) <= 0)
2791    return false;
2792
2793  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2794
2795  type_thrown = NULL_TREE;
2796  if (region->type == ERT_THROW)
2797    {
2798      type_thrown = region->u.throw.type;
2799      region = region->outer;
2800    }
2801
2802  /* If this exception is ignored by each and every containing region,
2803     then control passes straight out.  The runtime may handle some
2804     regions, which also do not require processing internally.  */
2805  for (; region; region = region->outer)
2806    {
2807      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2808      if (how == RNL_BLOCKED)
2809	return false;
2810      if (how != RNL_NOT_CAUGHT)
2811	return true;
2812    }
2813
2814  return false;
2815}
2816
2817/* Determine if the given INSN can throw an exception that is
2818   visible outside the function.  */
2819
2820bool
2821can_throw_external (rtx insn)
2822{
2823  struct eh_region *region;
2824  tree type_thrown;
2825  rtx note;
2826
2827  if (! INSN_P (insn))
2828    return false;
2829
2830  if (GET_CODE (insn) == INSN
2831      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2832    insn = XVECEXP (PATTERN (insn), 0, 0);
2833
2834  if (GET_CODE (insn) == CALL_INSN
2835      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2836    {
2837      int i;
2838      for (i = 0; i < 3; ++i)
2839	{
2840	  rtx sub = XEXP (PATTERN (insn), i);
2841	  for (; sub ; sub = NEXT_INSN (sub))
2842	    if (can_throw_external (sub))
2843	      return true;
2844	}
2845      return false;
2846    }
2847
2848  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2849  if (!note)
2850    {
2851      /* Calls (and trapping insns) without notes are outside any
2852	 exception handling region in this function.  We have to
2853	 assume it might throw.  Given that the front end and middle
2854	 ends mark known NOTHROW functions, this isn't so wildly
2855	 inaccurate.  */
2856      return (GET_CODE (insn) == CALL_INSN
2857	      || (flag_non_call_exceptions
2858		  && may_trap_p (PATTERN (insn))));
2859    }
2860  if (INTVAL (XEXP (note, 0)) <= 0)
2861    return false;
2862
2863  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2864
2865  type_thrown = NULL_TREE;
2866  if (region->type == ERT_THROW)
2867    {
2868      type_thrown = region->u.throw.type;
2869      region = region->outer;
2870    }
2871
2872  /* If the exception is caught or blocked by any containing region,
2873     then it is not seen by any calling function.  */
2874  for (; region ; region = region->outer)
2875    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2876      return false;
2877
2878  return true;
2879}
2880
2881/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */
2882
2883void
2884set_nothrow_function_flags (void)
2885{
2886  rtx insn;
2887
2888  current_function_nothrow = 1;
2889
2890  /* Assume cfun->all_throwers_are_sibcalls until we encounter
2891     something that can throw an exception.  We specifically exempt
2892     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2893     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
2894     is optimistic.  */
2895
2896  cfun->all_throwers_are_sibcalls = 1;
2897
2898  if (! flag_exceptions)
2899    return;
2900
2901  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2902    if (can_throw_external (insn))
2903      {
2904	current_function_nothrow = 0;
2905
2906	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2907	  {
2908	    cfun->all_throwers_are_sibcalls = 0;
2909	    return;
2910	  }
2911      }
2912
2913  for (insn = current_function_epilogue_delay_list; insn;
2914       insn = XEXP (insn, 1))
2915    if (can_throw_external (insn))
2916      {
2917	current_function_nothrow = 0;
2918
2919	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2920	  {
2921	    cfun->all_throwers_are_sibcalls = 0;
2922	    return;
2923	  }
2924      }
2925}
2926
2927
2928/* Various hooks for unwind library.  */
2929
2930/* Do any necessary initialization to access arbitrary stack frames.
2931   On the SPARC, this means flushing the register windows.  */
2932
2933void
2934expand_builtin_unwind_init (void)
2935{
2936  /* Set this so all the registers get saved in our frame; we need to be
2937     able to copy the saved values for any registers from frames we unwind.  */
2938  current_function_has_nonlocal_label = 1;
2939
2940#ifdef SETUP_FRAME_ADDRESSES
2941  SETUP_FRAME_ADDRESSES ();
2942#endif
2943}
2944
2945rtx
2946expand_builtin_eh_return_data_regno (tree arglist)
2947{
2948  tree which = TREE_VALUE (arglist);
2949  unsigned HOST_WIDE_INT iwhich;
2950
2951  if (TREE_CODE (which) != INTEGER_CST)
2952    {
2953      error ("argument of `__builtin_eh_return_regno' must be constant");
2954      return constm1_rtx;
2955    }
2956
2957  iwhich = tree_low_cst (which, 1);
2958  iwhich = EH_RETURN_DATA_REGNO (iwhich);
2959  if (iwhich == INVALID_REGNUM)
2960    return constm1_rtx;
2961
2962#ifdef DWARF_FRAME_REGNUM
2963  iwhich = DWARF_FRAME_REGNUM (iwhich);
2964#else
2965  iwhich = DBX_REGISTER_NUMBER (iwhich);
2966#endif
2967
2968  return GEN_INT (iwhich);
2969}
2970
2971/* Given a value extracted from the return address register or stack slot,
2972   return the actual address encoded in that value.  */
2973
2974rtx
2975expand_builtin_extract_return_addr (tree addr_tree)
2976{
2977  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2978
2979  if (GET_MODE (addr) != Pmode
2980      && GET_MODE (addr) != VOIDmode)
2981    {
2982#ifdef POINTERS_EXTEND_UNSIGNED
2983      addr = convert_memory_address (Pmode, addr);
2984#else
2985      addr = convert_to_mode (Pmode, addr, 0);
2986#endif
2987    }
2988
2989  /* First mask out any unwanted bits.  */
2990#ifdef MASK_RETURN_ADDR
2991  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2992#endif
2993
2994  /* Then adjust to find the real return address.  */
2995#if defined (RETURN_ADDR_OFFSET)
2996  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2997#endif
2998
2999  return addr;
3000}
3001
3002/* Given an actual address in addr_tree, do any necessary encoding
3003   and return the value to be stored in the return address register or
3004   stack slot so the epilogue will return to that address.  */
3005
3006rtx
3007expand_builtin_frob_return_addr (tree addr_tree)
3008{
3009  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3010
3011  addr = convert_memory_address (Pmode, addr);
3012
3013#ifdef RETURN_ADDR_OFFSET
3014  addr = force_reg (Pmode, addr);
3015  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3016#endif
3017
3018  return addr;
3019}
3020
3021/* Set up the epilogue with the magic bits we'll need to return to the
3022   exception handler.  */
3023
3024void
3025expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3026			  tree handler_tree)
3027{
3028  rtx tmp;
3029
3030#ifdef EH_RETURN_STACKADJ_RTX
3031  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3032  tmp = convert_memory_address (Pmode, tmp);
3033  if (!cfun->eh->ehr_stackadj)
3034    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3035  else if (tmp != cfun->eh->ehr_stackadj)
3036    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3037#endif
3038
3039  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3040  tmp = convert_memory_address (Pmode, tmp);
3041  if (!cfun->eh->ehr_handler)
3042    cfun->eh->ehr_handler = copy_to_reg (tmp);
3043  else if (tmp != cfun->eh->ehr_handler)
3044    emit_move_insn (cfun->eh->ehr_handler, tmp);
3045
3046  if (!cfun->eh->ehr_label)
3047    cfun->eh->ehr_label = gen_label_rtx ();
3048  emit_jump (cfun->eh->ehr_label);
3049}
3050
3051void
3052expand_eh_return (void)
3053{
3054  rtx around_label;
3055
3056  if (! cfun->eh->ehr_label)
3057    return;
3058
3059  current_function_calls_eh_return = 1;
3060
3061#ifdef EH_RETURN_STACKADJ_RTX
3062  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3063#endif
3064
3065  around_label = gen_label_rtx ();
3066  emit_jump (around_label);
3067
3068  emit_label (cfun->eh->ehr_label);
3069  clobber_return_register ();
3070
3071#ifdef EH_RETURN_STACKADJ_RTX
3072  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3073#endif
3074
3075#ifdef HAVE_eh_return
3076  if (HAVE_eh_return)
3077    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3078  else
3079#endif
3080    {
3081#ifdef EH_RETURN_HANDLER_RTX
3082      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3083#else
3084      error ("__builtin_eh_return not supported on this target");
3085#endif
3086    }
3087
3088  emit_label (around_label);
3089}
3090
3091/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3092   POINTERS_EXTEND_UNSIGNED and return it.  */
3093
3094rtx
3095expand_builtin_extend_pointer (tree addr_tree)
3096{
3097  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3098  int extend;
3099
3100#ifdef POINTERS_EXTEND_UNSIGNED
3101  extend = POINTERS_EXTEND_UNSIGNED;
3102#else
3103	  /* The previous EH code did an unsigned extend by default, so we do
3104	     the same here for consistency.  */
3105  extend = 1;
3106#endif
3107
3108  return convert_modes (word_mode, ptr_mode, addr, extend);
3109}
3110
3111/* In the following functions, we represent entries in the action table
3112   as 1-based indices.  Special cases are:
3113
3114	 0:	null action record, non-null landing pad; implies cleanups
3115	-1:	null action record, null landing pad; implies no action
3116	-2:	no call-site entry; implies must_not_throw
3117	-3:	we have yet to process outer regions
3118
3119   Further, no special cases apply to the "next" field of the record.
3120   For next, 0 means end of list.  */
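/* For illustration, consider two chained records built by
   add_action_record below.  Adding (filter 3, next 0) first yields the
   bytes {3, 0} and offset 1; adding (filter 1, next 1) then appends
   {1, 0x7d}, i.e. a next displacement of -3 pointing back at the first
   record, and yields offset 3.  */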
3121
3122struct action_record
3123{
3124  int offset;
3125  int filter;
3126  int next;
3127};
3128
3129static int
3130action_record_eq (const void *pentry, const void *pdata)
3131{
3132  const struct action_record *entry = (const struct action_record *) pentry;
3133  const struct action_record *data = (const struct action_record *) pdata;
3134  return entry->filter == data->filter && entry->next == data->next;
3135}
3136
3137static hashval_t
3138action_record_hash (const void *pentry)
3139{
3140  const struct action_record *entry = (const struct action_record *) pentry;
3141  return entry->next * 1009 + entry->filter;
3142}
3143
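/* Enter a (FILTER, NEXT) pair into the action record hash table and the
   action_record_data varray, reusing an existing record if one matches.
   Return the 1-based offset of the record within the varray.  */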
3144static int
3145add_action_record (htab_t ar_hash, int filter, int next)
3146{
3147  struct action_record **slot, *new, tmp;
3148
3149  tmp.filter = filter;
3150  tmp.next = next;
3151  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3152
3153  if ((new = *slot) == NULL)
3154    {
3155      new = xmalloc (sizeof (*new));
3156      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3157      new->filter = filter;
3158      new->next = next;
3159      *slot = new;
3160
3161      /* The filter value goes in untouched.  The link to the next
3162	 record is a "self-relative" byte offset, or zero to indicate
3163	 that there is no next record.  So convert the absolute 1-based
3164	 indices we've been carrying around into a displacement.  */
3165
3166      push_sleb128 (&cfun->eh->action_record_data, filter);
3167      if (next)
3168	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3169      push_sleb128 (&cfun->eh->action_record_data, next);
3170    }
3171
3172  return new->offset;
3173}
3174
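/* Compute the chain of action records covering REGION and all of its
   outer regions.  Return the 1-based offset of the first record, 0 for
   a cleanup-only chain, -1 for no action at all, or -2 for
   must-not-throw (see the table of special indices above).  */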
3175static int
3176collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3177{
3178  struct eh_region *c;
3179  int next;
3180
3181  /* If we've reached the top of the region chain, then we have
3182     no actions, and require no landing pad.  */
3183  if (region == NULL)
3184    return -1;
3185
3186  switch (region->type)
3187    {
3188    case ERT_CLEANUP:
3189      /* A cleanup adds a zero filter to the beginning of the chain, but
3190	 there are special cases to look out for.  If there are *only*
3191	 cleanups along a path, then it compresses to a zero action.
3192	 Further, if there are multiple cleanups along a path, we only
3193	 need to represent one of them, as that is enough to trigger
3194	 entry to the landing pad at runtime.  */
3195      next = collect_one_action_chain (ar_hash, region->outer);
3196      if (next <= 0)
3197	return 0;
3198      for (c = region->outer; c ; c = c->outer)
3199	if (c->type == ERT_CLEANUP)
3200	  return next;
3201      return add_action_record (ar_hash, 0, next);
3202
3203    case ERT_TRY:
3204      /* Process the associated catch regions in reverse order.
3205	 If there's a catch-all handler, then we don't need to
3206	 search outer regions.  Use a magic -3 value to record
3207	 that we haven't done the outer search.  */
3208      next = -3;
3209      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3210	{
3211	  if (c->u.catch.type_list == NULL)
3212	    {
3213	      /* Retrieve the filter from the head of the filter list
3214		 where we have stored it (see assign_filter_values).  */
3215	      int filter
3216		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3217
3218	      next = add_action_record (ar_hash, filter, 0);
3219	    }
3220	  else
3221	    {
3222	      /* Once the outer search is done, trigger an action record for
3223                 each filter we have.  */
3224	      tree flt_node;
3225
3226	      if (next == -3)
3227		{
3228		  next = collect_one_action_chain (ar_hash, region->outer);
3229
3230		  /* If there is no next action, terminate the chain.  */
3231		  if (next == -1)
3232		    next = 0;
3233		  /* If all outer actions are cleanups or must_not_throw,
3234		     we'll have no action record for this chain, since we want
3235		     to encode these states in the call-site record directly.
3236		     Add a cleanup action to the chain to catch these.  */
3237		  else if (next <= 0)
3238		    next = add_action_record (ar_hash, 0, 0);
3239		}
3240
3241	      flt_node = c->u.catch.filter_list;
3242	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3243		{
3244		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3245		  next = add_action_record (ar_hash, filter, next);
3246		}
3247	    }
3248	}
3249      return next;
3250
3251    case ERT_ALLOWED_EXCEPTIONS:
3252      /* An exception specification adds its filter to the
3253	 beginning of the chain.  */
3254      next = collect_one_action_chain (ar_hash, region->outer);
3255
3256      /* If there is no next action, terminate the chain.  */
3257      if (next == -1)
3258	next = 0;
3259      /* If all outer actions are cleanups or must_not_throw,
3260	 we'll have no action record for this chain, since we want
3261	 to encode these states in the call-site record directly.
3262	 Add a cleanup action to the chain to catch these.  */
3263      else if (next <= 0)
3264	next = add_action_record (ar_hash, 0, 0);
3265
3266      return add_action_record (ar_hash, region->u.allowed.filter, next);
3267
3268    case ERT_MUST_NOT_THROW:
3269      /* A must-not-throw region with no inner handlers or cleanups
3270	 requires no call-site entry.  Note that this differs from
3271	 the no handler or cleanup case in that we do require an lsda
3272	 to be generated.  Return a magic -2 value to record this.  */
3273      return -2;
3274
3275    case ERT_CATCH:
3276    case ERT_THROW:
3277      /* CATCH regions are handled in TRY above.  THROW regions are
3278	 for optimization information only and produce no output.  */
3279      return collect_one_action_chain (ar_hash, region->outer);
3280
3281    default:
3282      abort ();
3283    }
3284}
3285
3286static int
3287add_call_site (rtx landing_pad, int action)
3288{
3289  struct call_site_record *data = cfun->eh->call_site_data;
3290  int used = cfun->eh->call_site_data_used;
3291  int size = cfun->eh->call_site_data_size;
3292
3293  if (used >= size)
3294    {
3295      size = (size ? size * 2 : 64);
3296      data = ggc_realloc (data, sizeof (*data) * size);
3297      cfun->eh->call_site_data = data;
3298      cfun->eh->call_site_data_size = size;
3299    }
3300
3301  data[used].landing_pad = landing_pad;
3302  data[used].action = action;
3303
3304  cfun->eh->call_site_data_used = used + 1;
3305
3306  return used + call_site_base;
3307}
3308
3309/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3310   The new note numbers will not refer to region numbers, but
3311   instead to call site entries.  */
3312
3313void
3314convert_to_eh_region_ranges (void)
3315{
3316  rtx insn, iter, note;
3317  htab_t ar_hash;
3318  int last_action = -3;
3319  rtx last_action_insn = NULL_RTX;
3320  rtx last_landing_pad = NULL_RTX;
3321  rtx first_no_action_insn = NULL_RTX;
3322  int call_site = 0;
3323
3324  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3325    return;
3326
3327  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3328
3329  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3330
3331  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3332    if (INSN_P (iter))
3333      {
3334	struct eh_region *region;
3335	int this_action;
3336	rtx this_landing_pad;
3337
3338	insn = iter;
3339	if (GET_CODE (insn) == INSN
3340	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3341	  insn = XVECEXP (PATTERN (insn), 0, 0);
3342
3343	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3344	if (!note)
3345	  {
3346	    if (! (GET_CODE (insn) == CALL_INSN
3347		   || (flag_non_call_exceptions
3348		       && may_trap_p (PATTERN (insn)))))
3349	      continue;
3350	    this_action = -1;
3351	    region = NULL;
3352	  }
3353	else
3354	  {
3355	    if (INTVAL (XEXP (note, 0)) <= 0)
3356	      continue;
3357	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3358	    this_action = collect_one_action_chain (ar_hash, region);
3359	  }
3360
3361	/* Existence of catch handlers or must-not-throw regions
3362	   implies that an lsda is needed (even if empty).  */
3363	if (this_action != -1)
3364	  cfun->uses_eh_lsda = 1;
3365
3366	/* Delay creation of region notes for no-action regions
3367	   until we're sure that an lsda will be required.  */
3368	else if (last_action == -3)
3369	  {
3370	    first_no_action_insn = iter;
3371	    last_action = -1;
3372	  }
3373
3374	/* Cleanups and handlers may share action chains but not
3375	   landing pads.  Collect the landing pad for this region.  */
3376	if (this_action >= 0)
3377	  {
3378	    struct eh_region *o;
3379	    for (o = region; ! o->landing_pad ; o = o->outer)
3380	      continue;
3381	    this_landing_pad = o->landing_pad;
3382	  }
3383	else
3384	  this_landing_pad = NULL_RTX;
3385
3386	/* Differing actions or landing pads implies a change in call-site
3387	   info, which implies some EH_REGION note should be emitted.  */
3388	if (last_action != this_action
3389	    || last_landing_pad != this_landing_pad)
3390	  {
3391	    /* If we'd not seen a previous action (-3) or the previous
3392	       action was must-not-throw (-2), then we do not need an
3393	       end note.  */
3394	    if (last_action >= -1)
3395	      {
3396		/* If we delayed the creation of the begin, do it now.  */
3397		if (first_no_action_insn)
3398		  {
3399		    call_site = add_call_site (NULL_RTX, 0);
3400		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3401					     first_no_action_insn);
3402		    NOTE_EH_HANDLER (note) = call_site;
3403		    first_no_action_insn = NULL_RTX;
3404		  }
3405
3406		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3407					last_action_insn);
3408		NOTE_EH_HANDLER (note) = call_site;
3409	      }
3410
3411	    /* If the new action is must-not-throw, then no region notes
3412	       are created.  */
3413	    if (this_action >= -1)
3414	      {
3415		call_site = add_call_site (this_landing_pad,
3416					   this_action < 0 ? 0 : this_action);
3417		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3418		NOTE_EH_HANDLER (note) = call_site;
3419	      }
3420
3421	    last_action = this_action;
3422	    last_landing_pad = this_landing_pad;
3423	  }
3424	last_action_insn = iter;
3425      }
3426
3427  if (last_action >= -1 && ! first_no_action_insn)
3428    {
3429      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3430      NOTE_EH_HANDLER (note) = call_site;
3431    }
3432
3433  htab_delete (ar_hash);
3434}
3435
3436
3437static void
3438push_uleb128 (varray_type *data_area, unsigned int value)
3439{
3440  do
3441    {
3442      unsigned char byte = value & 0x7f;
3443      value >>= 7;
3444      if (value)
3445	byte |= 0x80;
3446      VARRAY_PUSH_UCHAR (*data_area, byte);
3447    }
3448  while (value);
3449}
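/* For example, the value 624485 (0x98765) encodes as the uleb128 byte
   sequence 0xe5 0x8e 0x26.  */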
3450
3451static void
3452push_sleb128 (varray_type *data_area, int value)
3453{
3454  unsigned char byte;
3455  int more;
3456
3457  do
3458    {
3459      byte = value & 0x7f;
3460      value >>= 7;
3461      more = ! ((value == 0 && (byte & 0x40) == 0)
3462		|| (value == -1 && (byte & 0x40) != 0));
3463      if (more)
3464	byte |= 0x80;
3465      VARRAY_PUSH_UCHAR (*data_area, byte);
3466    }
3467  while (more);
3468}
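/* For example, the value -123456 encodes as the sleb128 byte sequence
   0xc0 0xbb 0x78.  */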
3469
3470
3471#ifndef HAVE_AS_LEB128
3472static int
3473dw2_size_of_call_site_table (void)
3474{
3475  int n = cfun->eh->call_site_data_used;
3476  int size = n * (4 + 4 + 4);
3477  int i;
3478
3479  for (i = 0; i < n; ++i)
3480    {
3481      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3482      size += size_of_uleb128 (cs->action);
3483    }
3484
3485  return size;
3486}
3487
3488static int
3489sjlj_size_of_call_site_table (void)
3490{
3491  int n = cfun->eh->call_site_data_used;
3492  int size = 0;
3493  int i;
3494
3495  for (i = 0; i < n; ++i)
3496    {
3497      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3498      size += size_of_uleb128 (INTVAL (cs->landing_pad));
3499      size += size_of_uleb128 (cs->action);
3500    }
3501
3502  return size;
3503}
3504#endif
3505
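/* Emit the call-site table for dwarf2 unwinding: for each call site,
   the region start (relative to the function start), the region length,
   the landing pad (relative to the function start, or 0), and the
   action record offset.  */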
3506static void
3507dw2_output_call_site_table (void)
3508{
3509  const char *const function_start_lab
3510    = IDENTIFIER_POINTER (current_function_func_begin_label);
3511  int n = cfun->eh->call_site_data_used;
3512  int i;
3513
3514  for (i = 0; i < n; ++i)
3515    {
3516      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3517      char reg_start_lab[32];
3518      char reg_end_lab[32];
3519      char landing_pad_lab[32];
3520
3521      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3522      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3523
3524      if (cs->landing_pad)
3525	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3526				     CODE_LABEL_NUMBER (cs->landing_pad));
3527
3528      /* ??? Perhaps use insn length scaling if the assembler supports
3529	 generic arithmetic.  */
3530      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3531	 data4 if the function is small enough.  */
3532#ifdef HAVE_AS_LEB128
3533      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3534				    "region %d start", i);
3535      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3536				    "length");
3537      if (cs->landing_pad)
3538	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3539				      "landing pad");
3540      else
3541	dw2_asm_output_data_uleb128 (0, "landing pad");
3542#else
3543      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3544			    "region %d start", i);
3545      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3546      if (cs->landing_pad)
3547	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3548			      "landing pad");
3549      else
3550	dw2_asm_output_data (4, 0, "landing pad");
3551#endif
3552      dw2_asm_output_data_uleb128 (cs->action, "action");
3553    }
3554
3555  call_site_base += n;
3556}
3557
3558static void
3559sjlj_output_call_site_table (void)
3560{
3561  int n = cfun->eh->call_site_data_used;
3562  int i;
3563
3564  for (i = 0; i < n; ++i)
3565    {
3566      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3567
3568      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3569				   "region %d landing pad", i);
3570      dw2_asm_output_data_uleb128 (cs->action, "action");
3571    }
3572
3573  call_site_base += n;
3574}
3575
3576/* Tell assembler to switch to the section for the exception handling
3577   table.  */
3578
3579void
3580default_exception_section (void)
3581{
3582  if (targetm.have_named_sections)
3583    {
3584      int flags;
3585#ifdef HAVE_LD_RO_RW_SECTION_MIXING
3586      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3587
3588      flags = (! flag_pic
3589	       || ((tt_format & 0x70) != DW_EH_PE_absptr
3590		   && (tt_format & 0x70) != DW_EH_PE_aligned))
3591	      ? 0 : SECTION_WRITE;
3592#else
3593      flags = SECTION_WRITE;
3594#endif
3595      named_section_flags (".gcc_except_table", flags);
3596    }
3597  else if (flag_pic)
3598    data_section ();
3599  else
3600    readonly_data_section ();
3601}
3602
3603void
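/* Output the LSDA, the per-function data consumed by the personality
   routine during unwinding, if the current function needs one.  */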
3604output_function_exception_table (void)
3605{
3606  int tt_format, cs_format, lp_format, i, n;
3607#ifdef HAVE_AS_LEB128
3608  char ttype_label[32];
3609  char cs_after_size_label[32];
3610  char cs_end_label[32];
3611#else
3612  int call_site_len;
3613#endif
3614  int have_tt_data;
3615  int tt_format_size = 0;
3616
3617  /* Not all functions need anything.  */
3618  if (! cfun->uses_eh_lsda)
3619    return;
3620
3621#ifdef IA64_UNWIND_INFO
3622  fputs ("\t.personality\t", asm_out_file);
3623  output_addr_const (asm_out_file, eh_personality_libfunc);
3624  fputs ("\n\t.handlerdata\n", asm_out_file);
3625  /* Note that varasm still thinks we're in the function's code section.
3626     The ".endp" directive that will immediately follow will take us back.  */
3627#else
3628  (*targetm.asm_out.exception_section) ();
3629#endif
3630
3631  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3632		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3633
3634  /* Indicate the format of the @TType entries.  */
3635  if (! have_tt_data)
3636    tt_format = DW_EH_PE_omit;
3637  else
3638    {
3639      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3640#ifdef HAVE_AS_LEB128
3641      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3642				   current_function_funcdef_no);
3643#endif
3644      tt_format_size = size_of_encoded_value (tt_format);
3645
3646      assemble_align (tt_format_size * BITS_PER_UNIT);
3647    }
3648
3649  (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
3650			     current_function_funcdef_no);
3651
3652  /* The LSDA header.  */
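/* The layout emitted below is, in order:

	1 byte		@LPStart format (always DW_EH_PE_omit here)
	1 byte		@TType format
	uleb128		@TType base offset (omitted if no @TType data)
	1 byte		call-site table format
	uleb128		call-site table length
			call-site table
			action record table
			@TType table (aligned, emitted in reverse)  */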
3653
3654  /* Indicate the format of the landing pad start pointer.  An omitted
3655     field implies @LPStart == @Start.  */
3656  /* Currently we always put @LPStart == @Start.  This field would
3657     be most useful in moving the landing pads completely out of
3658     line to another section, but it could also be used to minimize
3659     the size of uleb128 landing pad offsets.  */
3660  lp_format = DW_EH_PE_omit;
3661  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3662		       eh_data_format_name (lp_format));
3663
3664  /* @LPStart pointer would go here.  */
3665
3666  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3667		       eh_data_format_name (tt_format));
3668
#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A uleb128 displacement from the end of this field to the @TType
     base, which sits at the end of the type table.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

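      /* BEFORE_DISP counts the @LPStart and @TType format bytes above;
	 AFTER_DISP covers everything from the call-site format byte
	 through the end of the type table.  The size of the uleb128
	 encoding of the displacement feeds into the padding needed to
	 align the type table, which in turn changes the displacement,
	 so iterate until the value reaches a fixed point.  */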
      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

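  /* With leb128 support, let the assembler compute the table length
     from a pair of labels around the table; otherwise emit the length
     we computed by hand above.  */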
#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
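  /* The action table is dumped as raw bytes; each record, as built
     elsewhere in this file, is a pair of sleb128 values: a type filter
     and an offset chaining to the next record.  */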
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

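  /* Emit the type table in reverse so that the entry for filter value 1
     ends up adjacent to the @TType base label placed at the end of the
     table; the runtime indexes entries backwards from that base.  */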
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      node = cgraph_varpool_node (type);
	      if (node)
		cgraph_varpool_mark_needed_node (node);
	    }
	  else if (TREE_CODE (type) != INTEGER_CST)
	    abort ();
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
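  /* The exception specification data, as built elsewhere in this file,
     consists of lists of uleb128 type filters, each list terminated by
     a zero byte; here we only dump the raw bytes.  */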
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}

#include "gt-except.h"