/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function. This event can then be "caught" or "trapped" by the
   callers of this function. This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name. The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception". (Alternate terms include "raising an exception" or
   "signaling an exception".) The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
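
/* In the meantime, a rough sketch (illustrative, not normative) of
   how a front end typically drives this module when expanding a
   try/catch construct:

	expand_eh_region_start ();
	  ... expand the protected body ...
	expand_start_all_catch ();
	expand_start_catch (type);
	  ... expand the handler body ...
	expand_end_catch ();
	expand_end_all_catch ();

   A cleanup region is bracketed the same way, but is closed with
   expand_eh_region_end_cleanup instead.  */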


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
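
/* For illustration: if region R contains regions A and B (opened in
   that order), then A->outer == B->outer == R, R->inner points at B
   (the most recently opened child), and B->next_peer points at A.
   Peer lists are thus threaded through next_peer, newest first, as
   built by expand_eh_region_start below.  */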

/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently opened region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};


static void mark_eh_region			PARAMS ((struct eh_region *));

static int t2r_eq				PARAMS ((const PTR,
							 const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static int t2r_mark_1				PARAMS ((PTR *, PTR));
static void t2r_mark				PARAMS ((PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter			PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void remove_unreachable_regions		PARAMS ((rtx));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
							 struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t,
							 tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
							 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
							 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table		PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table		PARAMS ((void));

/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}


void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
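
/* For reference, a sketch of the C structure that the type built
   above corresponds to.  unwind-sjlj.c has the authoritative
   definition; the jbuf size and the exact integer types are
   target-dependent, as computed above:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  unsigned long data[4];	   -- word_mode entries
	  void *personality;
	  void *lsda;
	  void *jbuf[TARGET_DEPENDENT];	   -- size chosen above
	};
*/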

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}

/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
	        next = r->outer;
	        free (r);
	        r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
  exception_handler_labels = NULL;
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
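
/* Thus the insn stream for a protected body ends up bracketed as

	NOTE_INSN_EH_REGION_BEG   (NOTE_EH_HANDLER == N)
	  ... insns of the protected body ...
	NOTE_INSN_EH_REGION_END   (NOTE_EH_HANDLER == N)

   and convert_from_eh_region_ranges below later replaces these
   bracketing notes with per-insn REG_EH_REGION notes.  */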

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list, to normalize further
         processing; then register each type against the runtime types
         map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
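
/* A sketch of the expected calling pattern, with hypothetical cleanup
   expressions c1 and c2:

	begin_protect_partials ();
	add_partial_entry (c1);
	  ... code whose exceptions must run c1 ...
	add_partial_entry (c2);
	  ... code whose exceptions must run c2 and then c1 ...
	end_protect_partials ();

   Since add_partial_entry conses onto the front of the list,
   end_protect_partials closes the regions innermost first.  */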


/* This section is for the exception handling specific optimization pass.  */

/* Build an array for random access into the exception region tree.
   It's just as simple to collect the regions this way as in
   expand_eh_region_start, but without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
        fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
        {
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
        }
      if (r->type == ERT_TRY && r->u.try.continue_label)
        {
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   instruction within the region that can throw.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less,
		 which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}

void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}


static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}


static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
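
/* By way of a hypothetical example: if types A, B and C are entered
   in that order, add_ttypes_entry assigns them filters 1, 2 and 3.
   An exception specification throw (A, C) recorded first in
   ehspec_data then gets filter -1, and its entry is the uleb128
   byte sequence 1, 3, 0, where the trailing 0 terminates the list.  */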

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1820
1821static void
1822build_post_landing_pads ()
1823{
1824  int i;
1825
1826  for (i = cfun->eh->last_region_number; i > 0; --i)
1827    {
1828      struct eh_region *region = cfun->eh->region_array[i];
1829      rtx seq;
1830
1831      /* Mind we don't process a region more than once.  */
1832      if (!region || region->region_number != i)
1833	continue;
1834
1835      switch (region->type)
1836	{
1837	case ERT_TRY:
1838	  /* ??? Collect the set of all non-overlapping catch handlers
1839	       all the way up the chain until blocked by a cleanup.  */
1840	  /* ??? Outer try regions can share landing pads with inner
1841	     try regions if the types are completely non-overlapping,
1842	     and there are no intervening cleanups.  */
1843
1844	  region->post_landing_pad = gen_label_rtx ();
1845
1846	  start_sequence ();
1847
1848	  emit_label (region->post_landing_pad);
1849
1850	  /* ??? It is mighty inconvenient to call back into the
1851	     switch statement generation code in expand_end_case.
1852	     Rapid prototyping sez a sequence of ifs.  */
1853	  {
1854	    struct eh_region *c;
1855	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1856	      {
1857		/* ??? _Unwind_ForcedUnwind wants no match here.  */
1858		if (c->u.catch.type_list == NULL)
1859		  emit_jump (c->label);
1860		else
1861		  {
1862		    /* Need for one cmp/jump per type caught. Each type
1863		       list entry has a matching entry in the filter list
1864		       (see assign_filter_values).  */
1865		    tree tp_node = c->u.catch.type_list;
1866		    tree flt_node = c->u.catch.filter_list;
1867
1868		    for (; tp_node; )
1869		      {
1870			emit_cmp_and_jump_insns
1871			  (cfun->eh->filter,
1872			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1873			   EQ, NULL_RTX, word_mode, 0, c->label);
1874
1875			tp_node = TREE_CHAIN (tp_node);
1876			flt_node = TREE_CHAIN (flt_node);
1877		      }
1878		  }
1879	      }
1880	  }
1881
1882	  /* We delay the generation of the _Unwind_Resume until we generate
1883	     landing pads.  We emit a marker here so as to get good control
1884	     flow data in the meantime.  */
1885	  region->resume
1886	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1887	  emit_barrier ();
1888
1889	  seq = get_insns ();
1890	  end_sequence ();
1891
1892	  emit_insns_before (seq, region->u.try.catch->label);
1893	  break;
1894
1895	case ERT_ALLOWED_EXCEPTIONS:
1896	  region->post_landing_pad = gen_label_rtx ();
1897
1898	  start_sequence ();
1899
1900	  emit_label (region->post_landing_pad);
1901
1902	  emit_cmp_and_jump_insns (cfun->eh->filter,
1903				   GEN_INT (region->u.allowed.filter),
1904				   EQ, NULL_RTX, word_mode, 0, region->label);
1905
1906	  /* We delay the generation of the _Unwind_Resume until we generate
1907	     landing pads.  We emit a marker here so as to get good control
1908	     flow data in the meantime.  */
1909	  region->resume
1910	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1911	  emit_barrier ();
1912
1913	  seq = get_insns ();
1914	  end_sequence ();
1915
1916	  emit_insns_before (seq, region->label);
1917	  break;
1918
1919	case ERT_CLEANUP:
1920	case ERT_MUST_NOT_THROW:
1921	  region->post_landing_pad = region->label;
1922	  break;
1923
1924	case ERT_CATCH:
1925	case ERT_THROW:
1926	  /* Nothing to do.  */
1927	  break;
1928
1929	default:
1930	  abort ();
1931	}
1932    }
1933}
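
/* For illustration only: for a try block with handlers for types A
   (filter 1) and B (filter 2), the code emitted above amounts to the
   following pseudo-C, placed just before the first catch label (the
   filter values and labels are invented for this sketch):

	post_landing_pad:
	  if (filter == 1) goto handler_A;
	  if (filter == 2) goto handler_B;
	  RESX;		-- marker; rewritten by connect_post_landing_pads

   A catch-all handler would appear as an unconditional jump instead
   of a compare.  */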
1934
1935/* Replace RESX patterns with jumps to the next handler if any, or calls to
1936   _Unwind_Resume otherwise.  */
1937
1938static void
1939connect_post_landing_pads ()
1940{
1941  int i;
1942
1943  for (i = cfun->eh->last_region_number; i > 0; --i)
1944    {
1945      struct eh_region *region = cfun->eh->region_array[i];
1946      struct eh_region *outer;
1947      rtx seq;
1948
1949      /* Mind we don't process a region more than once.  */
1950      if (!region || region->region_number != i)
1951	continue;
1952
1953      /* If there is no RESX, or it has been deleted by flow, there's
1954	 nothing to fix up.  */
1955      if (! region->resume || INSN_DELETED_P (region->resume))
1956	continue;
1957
1958      /* Search for another landing pad in this function.  */
1959      for (outer = region->outer; outer ; outer = outer->outer)
1960	if (outer->post_landing_pad)
1961	  break;
1962
1963      start_sequence ();
1964
1965      if (outer)
1966	emit_jump (outer->post_landing_pad);
1967      else
1968	emit_library_call (unwind_resume_libfunc, LCT_THROW,
1969			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1970
1971      seq = get_insns ();
1972      end_sequence ();
1973      emit_insns_before (seq, region->resume);
1974      delete_insn (region->resume);
1975    }
1976}
1977
1978
1979static void
1980dw2_build_landing_pads ()
1981{
1982  int i;
1983  unsigned int j;
1984
1985  for (i = cfun->eh->last_region_number; i > 0; --i)
1986    {
1987      struct eh_region *region = cfun->eh->region_array[i];
1988      rtx seq;
1989      bool clobbers_hard_regs = false;
1990
1991      /* Mind we don't process a region more than once.  */
1992      if (!region || region->region_number != i)
1993	continue;
1994
1995      if (region->type != ERT_CLEANUP
1996	  && region->type != ERT_TRY
1997	  && region->type != ERT_ALLOWED_EXCEPTIONS)
1998	continue;
1999
2000      start_sequence ();
2001
2002      region->landing_pad = gen_label_rtx ();
2003      emit_label (region->landing_pad);
2004
2005#ifdef HAVE_exception_receiver
2006      if (HAVE_exception_receiver)
2007	emit_insn (gen_exception_receiver ());
2008      else
2009#endif
2010#ifdef HAVE_nonlocal_goto_receiver
2011	if (HAVE_nonlocal_goto_receiver)
2012	  emit_insn (gen_nonlocal_goto_receiver ());
2013	else
2014#endif
2015	  { /* Nothing */ }
2016
2017      /* If the eh_return data registers are call-saved, then we
2018	 won't have considered them clobbered from the call that
2019	 threw.  Kill them now.  */
2020      for (j = 0; ; ++j)
2021	{
2022	  unsigned r = EH_RETURN_DATA_REGNO (j);
2023	  if (r == INVALID_REGNUM)
2024	    break;
2025	  if (! call_used_regs[r])
2026	    {
2027	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2028	      clobbers_hard_regs = true;
2029	    }
2030	}
2031
2032      if (clobbers_hard_regs)
2033	{
2034	  /* @@@ This is a kludge.  Not all machine descriptions define a
2035	     blockage insn, but we must not allow the code we just generated
2036	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
2037	     a blockage insn.  */
2038	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2039	}
2040
2041      emit_move_insn (cfun->eh->exc_ptr,
2042		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
2043      emit_move_insn (cfun->eh->filter,
2044		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
2045
2046      seq = get_insns ();
2047      end_sequence ();
2048
2049      emit_insns_before (seq, region->post_landing_pad);
2050    }
2051}
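
/* A hedged sketch of what the above emits for one region, placed just
   before the region's post-landing pad (the receivers and register
   choices are target-dependent; treat this as illustrative only):

	landing_pad:
	  exception or nonlocal-goto receiver, if the target defines one
	  clobbers of call-saved EH data regs, plus an ASM_INPUT blockage
	  exc_ptr = reg (Pmode, EH_RETURN_DATA_REGNO (0));
	  filter = reg (word_mode, EH_RETURN_DATA_REGNO (1));
	  ... falls through to the post-landing pad ...  */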
2052
2053
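/* Bookkeeping for one potential sjlj landing pad, indexed by region
   number.  DIRECTLY_REACHABLE is set when some insn may transfer
   control to the region's handler; ACTION_INDEX is the action record
   index computed by collect_one_action_chain (with the -1/-2 special
   cases described there); DISPATCH_INDEX selects the target within the
   common dispatch block; CALL_SITE_INDEX is the value stored into the
   function context before each call in the region.  */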
2054struct sjlj_lp_info
2055{
2056  int directly_reachable;
2057  int action_index;
2058  int dispatch_index;
2059  int call_site_index;
2060};
2061
2062static bool
2063sjlj_find_directly_reachable_regions (lp_info)
2064     struct sjlj_lp_info *lp_info;
2065{
2066  rtx insn;
2067  bool found_one = false;
2068
2069  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2070    {
2071      struct eh_region *region;
2072      enum reachable_code rc;
2073      tree type_thrown;
2074      rtx note;
2075
2076      if (! INSN_P (insn))
2077	continue;
2078
2079      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2080      if (!note || INTVAL (XEXP (note, 0)) <= 0)
2081	continue;
2082
2083      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2084
2085      type_thrown = NULL_TREE;
2086      if (region->type == ERT_THROW)
2087	{
2088	  type_thrown = region->u.throw.type;
2089	  region = region->outer;
2090	}
2091
2092      /* Find the first containing region that might handle the exception.
2093	 That's the landing pad to which we will transfer control.  */
2094      rc = RNL_NOT_CAUGHT;
2095      for (; region; region = region->outer)
2096	{
2097	  rc = reachable_next_level (region, type_thrown, 0);
2098	  if (rc != RNL_NOT_CAUGHT)
2099	    break;
2100	}
2101      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2102	{
2103	  lp_info[region->region_number].directly_reachable = 1;
2104	  found_one = true;
2105	}
2106    }
2107
2108  return found_one;
2109}
2110
2111static void
2112sjlj_assign_call_site_values (dispatch_label, lp_info)
2113     rtx dispatch_label;
2114     struct sjlj_lp_info *lp_info;
2115{
2116  htab_t ar_hash;
2117  int i, index;
2118
2119  /* First task: build the action table.  */
2120
2121  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2122  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2123
2124  for (i = cfun->eh->last_region_number; i > 0; --i)
2125    if (lp_info[i].directly_reachable)
2126      {
2127	struct eh_region *r = cfun->eh->region_array[i];
2128	r->landing_pad = dispatch_label;
2129	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2130	if (lp_info[i].action_index != -1)
2131	  cfun->uses_eh_lsda = 1;
2132      }
2133
2134  htab_delete (ar_hash);
2135
2136  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
2137     landing pad label for the region.  For sjlj though, there is one
2138     common landing pad from which we dispatch to the post-landing pads.
2139
2140     A region receives a dispatch index if it is directly reachable
2141     and requires in-function processing.  Regions that share post-landing
2142     pads may share dispatch indices.  */
2143  /* ??? Post-landing pad sharing doesn't actually happen at the moment
2144     (see build_post_landing_pads) so we don't bother checking for it.  */
2145
2146  index = 0;
2147  for (i = cfun->eh->last_region_number; i > 0; --i)
2148    if (lp_info[i].directly_reachable)
2149      lp_info[i].dispatch_index = index++;
2150
2151	  /* Finally: assign call-site values.  In dwarf2 terms, this would be
2152     the region number assigned by convert_to_eh_region_ranges, but
2153     handles no-action and must-not-throw differently.  */
2154
2155  call_site_base = 1;
2156  for (i = cfun->eh->last_region_number; i > 0; --i)
2157    if (lp_info[i].directly_reachable)
2158      {
2159	int action = lp_info[i].action_index;
2160
2161	/* Map must-not-throw to otherwise unused call-site index 0.  */
2162	if (action == -2)
2163	  index = 0;
2164	/* Map no-action to otherwise unused call-site index -1.  */
2165	else if (action == -1)
2166	  index = -1;
2167	/* Otherwise, look it up in the table.  */
2168	else
2169	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2170
2171	lp_info[i].call_site_index = index;
2172      }
2173}
2174
2175static void
2176sjlj_mark_call_sites (lp_info)
2177     struct sjlj_lp_info *lp_info;
2178{
2179  int last_call_site = -2;
2180  rtx insn, mem;
2181
2182  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2183    {
2184      struct eh_region *region;
2185      int this_call_site;
2186      rtx note, before, p;
2187
2188      /* Reset value tracking at extended basic block boundaries.  */
2189      if (GET_CODE (insn) == CODE_LABEL)
2190	last_call_site = -2;
2191
2192      if (! INSN_P (insn))
2193	continue;
2194
2195      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2196      if (!note)
2197	{
2198	  /* Calls (and trapping insns) without notes are outside any
2199	     exception handling region in this function.  Mark them as
2200	     no action.  */
2201	  if (GET_CODE (insn) == CALL_INSN
2202	      || (flag_non_call_exceptions
2203		  && may_trap_p (PATTERN (insn))))
2204	    this_call_site = -1;
2205	  else
2206	    continue;
2207	}
2208      else
2209	{
2210	  /* Calls that are known to not throw need not be marked.  */
2211	  if (INTVAL (XEXP (note, 0)) <= 0)
2212	    continue;
2213
2214	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2215	  this_call_site = lp_info[region->region_number].call_site_index;
2216	}
2217
2218      if (this_call_site == last_call_site)
2219	continue;
2220
2221	      /* Don't separate a call from its argument loads.  */
2222      before = insn;
2223      if (GET_CODE (insn) == CALL_INSN)
2224         before = find_first_parameter_load (insn, NULL_RTX);
2225
2226      start_sequence ();
2227      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2228			    sjlj_fc_call_site_ofs);
2229      emit_move_insn (mem, GEN_INT (this_call_site));
2230      p = get_insns ();
2231      end_sequence ();
2232
2233      emit_insns_before (p, before);
2234      last_call_site = this_call_site;
2235    }
2236}
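
/* The net effect, as a pseudo-C sketch (the field name and the index
   are invented for illustration): before each insn that can throw, the
   current call-site index is stored into the function context so the
   runtime knows which dispatch value to deliver:

	fc.call_site = 3;	-- lp_info[region].call_site_index
	call foo ();

   Stores that would repeat the previous value within an extended basic
   block are elided via last_call_site above.  */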
2237
2238/* Construct the SjLj_Function_Context.  */
2239
2240static void
2241sjlj_emit_function_enter (dispatch_label)
2242     rtx dispatch_label;
2243{
2244  rtx fn_begin, fc, mem, seq;
2245
2246  fc = cfun->eh->sjlj_fc;
2247
2248  start_sequence ();
2249
2250  /* We're storing this libcall's address into memory instead of
2251     calling it directly.  Thus, we must call assemble_external_libcall
2252	     here, as we cannot depend on emit_library_call to do it for us.  */
2253  assemble_external_libcall (eh_personality_libfunc);
2254  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2255  emit_move_insn (mem, eh_personality_libfunc);
2256
2257  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2258  if (cfun->uses_eh_lsda)
2259    {
2260      char buf[20];
2261      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2262      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2263    }
2264  else
2265    emit_move_insn (mem, const0_rtx);
2266
2267#ifdef DONT_USE_BUILTIN_SETJMP
2268  {
2269    rtx x, note;
2270    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2271				 TYPE_MODE (integer_type_node), 1,
2272				 plus_constant (XEXP (fc, 0),
2273						sjlj_fc_jbuf_ofs), Pmode);
2274
2275    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2276    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2277
2278    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2279			     TYPE_MODE (integer_type_node), 0, dispatch_label);
2280  }
2281#else
2282  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2283			       dispatch_label);
2284#endif
2285
2286  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2287		     1, XEXP (fc, 0), Pmode);
2288
2289  seq = get_insns ();
2290  end_sequence ();
2291
2292  /* ??? Instead of doing this at the beginning of the function,
2293     do this in a block that is at loop level 0 and dominates all
2294     can_throw_internal instructions.  */
2295
2296  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2297    if (GET_CODE (fn_begin) == NOTE
2298	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2299      break;
2300  emit_insns_after (seq, fn_begin);
2301}
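
/* Roughly, the prologue sequence constructed above corresponds to this
   pseudo-C (the personality routine is language-dependent, and
   _Unwind_SjLj_Register is the usual libgcc name behind
   unwind_sjlj_register_libfunc; treat the names as illustrative):

	fc.personality = __personality_routine;
	fc.lsda = &LLSDAnn;			-- or 0 if unused
	if (setjmp (fc.jbuf))			-- library or builtin setjmp
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);  */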
2302
2303/* Call back from expand_function_end to know where we should put
2304   the call to unwind_sjlj_unregister_libfunc if needed.  */
2305
2306void
2307sjlj_emit_function_exit_after (after)
2308     rtx after;
2309{
2310  cfun->eh->sjlj_exit_after = after;
2311}
2312
2313static void
2314sjlj_emit_function_exit ()
2315{
2316  rtx seq;
2317
2318  start_sequence ();
2319
2320  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2321		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2322
2323  seq = get_insns ();
2324  end_sequence ();
2325
2326  /* ??? Really this can be done in any block at loop level 0 that
2327     post-dominates all can_throw_internal instructions.  This is
2328     the last possible moment.  */
2329
2330  emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2331}
2332
2333static void
2334sjlj_emit_dispatch_table (dispatch_label, lp_info)
2335     rtx dispatch_label;
2336     struct sjlj_lp_info *lp_info;
2337{
2338  int i, first_reachable;
2339  rtx mem, dispatch, seq, fc;
2340
2341  fc = cfun->eh->sjlj_fc;
2342
2343  start_sequence ();
2344
2345  emit_label (dispatch_label);
2346
2347#ifndef DONT_USE_BUILTIN_SETJMP
2348  expand_builtin_setjmp_receiver (dispatch_label);
2349#endif
2350
2351  /* Load up dispatch index, exc_ptr and filter values from the
2352     function context.  */
2353  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2354			sjlj_fc_call_site_ofs);
2355  dispatch = copy_to_reg (mem);
2356
2357  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2358  if (word_mode != Pmode)
2359    {
2360#ifdef POINTERS_EXTEND_UNSIGNED
2361      mem = convert_memory_address (Pmode, mem);
2362#else
2363      mem = convert_to_mode (Pmode, mem, 0);
2364#endif
2365    }
2366  emit_move_insn (cfun->eh->exc_ptr, mem);
2367
2368  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2369  emit_move_insn (cfun->eh->filter, mem);
2370
2371  /* Jump to one of the directly reachable regions.  */
2372  /* ??? This really ought to be using a switch statement.  */
2373
2374  first_reachable = 0;
2375  for (i = cfun->eh->last_region_number; i > 0; --i)
2376    {
2377      if (! lp_info[i].directly_reachable)
2378	continue;
2379
2380      if (! first_reachable)
2381	{
2382	  first_reachable = i;
2383	  continue;
2384	}
2385
2386      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2387			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2388			       cfun->eh->region_array[i]->post_landing_pad);
2389    }
2390
2391  seq = get_insns ();
2392  end_sequence ();
2393
2394  emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2395			   ->post_landing_pad));
2396}
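
/* An illustrative pseudo-C rendering of the dispatch block built
   above, assuming two reachable regions with dispatch indices 0 and 1
   (the indices are invented for the sketch):

	dispatch_label:
	  -- setjmp receiver, unless DONT_USE_BUILTIN_SETJMP
	  dispatch = fc.call_site;
	  exc_ptr = fc.data[0];
	  filter = fc.data[1];
	  if (dispatch == 1) goto post_landing_pad_1;
	  -- fall through to the first reachable post-landing pad  */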
2397
2398static void
2399sjlj_build_landing_pads ()
2400{
2401  struct sjlj_lp_info *lp_info;
2402
2403  lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2404					     sizeof (struct sjlj_lp_info));
2405
2406  if (sjlj_find_directly_reachable_regions (lp_info))
2407    {
2408      rtx dispatch_label = gen_label_rtx ();
2409
2410      cfun->eh->sjlj_fc
2411	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2412			      int_size_in_bytes (sjlj_fc_type_node),
2413			      TYPE_ALIGN (sjlj_fc_type_node));
2414
2415      sjlj_assign_call_site_values (dispatch_label, lp_info);
2416      sjlj_mark_call_sites (lp_info);
2417
2418      sjlj_emit_function_enter (dispatch_label);
2419      sjlj_emit_dispatch_table (dispatch_label, lp_info);
2420      sjlj_emit_function_exit ();
2421    }
2422
2423  free (lp_info);
2424}
2425
2426void
2427finish_eh_generation ()
2428{
2429  /* Nothing to do if no regions created.  */
2430  if (cfun->eh->region_tree == NULL)
2431    return;
2432
2433  /* The object here is to provide find_basic_blocks with detailed
2434     information (via reachable_handlers) on how exception control
2435     flows within the function.  In this first pass, we can include
2436     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2437     regions, and hope that it will be useful in deleting unreachable
2438     handlers.  Subsequently, we will generate landing pads which will
2439     connect many of the handlers, and then type information will not
2440     be effective.  Still, this is a win over previous implementations.  */
2441
2442  rebuild_jump_labels (get_insns ());
2443  find_basic_blocks (get_insns (), max_reg_num (), 0);
2444  cleanup_cfg (CLEANUP_PRE_LOOP);
2445
2446  /* These registers are used by the landing pads.  Make sure they
2447     have been generated.  */
2448  get_exception_pointer (cfun);
2449  get_exception_filter (cfun);
2450
2451  /* Construct the landing pads.  */
2452
2453  assign_filter_values ();
2454  build_post_landing_pads ();
2455  connect_post_landing_pads ();
2456  if (USING_SJLJ_EXCEPTIONS)
2457    sjlj_build_landing_pads ();
2458  else
2459    dw2_build_landing_pads ();
2460
2461  cfun->eh->built_landing_pads = 1;
2462
2463  /* We've totally changed the CFG.  Start over.  */
2464  find_exception_handler_labels ();
2465  rebuild_jump_labels (get_insns ());
2466  find_basic_blocks (get_insns (), max_reg_num (), 0);
2467  cleanup_cfg (CLEANUP_PRE_LOOP);
2468}
2469
2470/* This section handles removing dead code for flow.  */
2471
2472/* Remove LABEL from the exception_handler_labels list.  */
2473
2474static void
2475remove_exception_handler_label (label)
2476     rtx label;
2477{
2478  rtx *pl, l;
2479
2480  /* If exception_handler_labels was not built yet,
2481     there is nothing to do.  */
2482  if (exception_handler_labels == NULL)
2483    return;
2484
2485  for (pl = &exception_handler_labels, l = *pl;
2486       XEXP (l, 0) != label;
2487       pl = &XEXP (l, 1), l = *pl)
2488    continue;
2489
2490  *pl = XEXP (l, 1);
2491  free_EXPR_LIST_node (l);
2492}
2493
2494	/* Splice REGION from the region tree and associated data structures.  */
2495
2496static void
2497remove_eh_handler (region)
2498     struct eh_region *region;
2499{
2500  struct eh_region **pp, *p;
2501  rtx lab;
2502  int i;
2503
2504  /* For the benefit of efficiently handling REG_EH_REGION notes,
2505     replace this region in the region array with its containing
2506     region.  Note that previous region deletions may result in
2507     multiple copies of this region in the array, so we have to
2508     search the whole thing.  */
2509  for (i = cfun->eh->last_region_number; i > 0; --i)
2510    if (cfun->eh->region_array[i] == region)
2511      cfun->eh->region_array[i] = region->outer;
2512
2513  if (cfun->eh->built_landing_pads)
2514    lab = region->landing_pad;
2515  else
2516    lab = region->label;
2517  if (lab)
2518    remove_exception_handler_label (lab);
2519
2520  if (region->outer)
2521    pp = &region->outer->inner;
2522  else
2523    pp = &cfun->eh->region_tree;
2524  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2525    continue;
2526
2527  if (region->inner)
2528    {
2529      for (p = region->inner; p->next_peer ; p = p->next_peer)
2530	p->outer = region->outer;
2531      p->next_peer = region->next_peer;
2532      p->outer = region->outer;
2533      *pp = region->inner;
2534    }
2535  else
2536    *pp = region->next_peer;
2537
2538  if (region->type == ERT_CATCH)
2539    {
2540      struct eh_region *try, *next, *prev;
2541
2542      for (try = region->next_peer;
2543	   try->type == ERT_CATCH;
2544	   try = try->next_peer)
2545	continue;
2546      if (try->type != ERT_TRY)
2547	abort ();
2548
2549      next = region->u.catch.next_catch;
2550      prev = region->u.catch.prev_catch;
2551
2552      if (next)
2553	next->u.catch.prev_catch = prev;
2554      else
2555	try->u.try.last_catch = prev;
2556      if (prev)
2557	prev->u.catch.next_catch = next;
2558      else
2559	{
2560	  try->u.try.catch = next;
2561	  if (! next)
2562	    remove_eh_handler (try);
2563	}
2564    }
2565
2566  free (region);
2567}
2568
2569/* LABEL heads a basic block that is about to be deleted.  If this
2570   label corresponds to an exception region, we may be able to
2571   delete the region.  */
2572
2573void
2574maybe_remove_eh_handler (label)
2575     rtx label;
2576{
2577  int i;
2578
2579  /* ??? After generating landing pads, it's not so simple to determine
2580     if the region data is completely unused.  One must examine the
2581     landing pad and the post landing pad, and whether an inner try block
2582     is referencing the catch handlers directly.  */
2583  if (cfun->eh->built_landing_pads)
2584    return;
2585
2586  for (i = cfun->eh->last_region_number; i > 0; --i)
2587    {
2588      struct eh_region *region = cfun->eh->region_array[i];
2589      if (region && region->label == label)
2590	{
2591	  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2592	     because there is no path to the fallback call to terminate.
2593	     But the region continues to affect call-site data until there
2594	     are no more contained calls, which we don't see here.  */
2595	  if (region->type == ERT_MUST_NOT_THROW)
2596	    {
2597	      remove_exception_handler_label (region->label);
2598	      region->label = NULL_RTX;
2599	    }
2600	  else
2601	    remove_eh_handler (region);
2602	  break;
2603	}
2604    }
2605}
2606
2607
2608/* This section describes CFG exception edges for flow.  */
2609
2610/* For communicating between calls to reachable_next_level.  */
2611struct reachable_info
2612{
2613  tree types_caught;
2614  tree types_allowed;
2615  rtx handlers;
2616};
2617
2618/* A subroutine of reachable_next_level.  Return true if TYPE, or a
2619   base class of TYPE, is in HANDLED.  */
2620
2621static int
2622check_handled (handled, type)
2623     tree handled, type;
2624{
2625  tree t;
2626
2627  /* We can check for exact matches without front-end help.  */
2628  if (! lang_eh_type_covers)
2629    {
2630      for (t = handled; t ; t = TREE_CHAIN (t))
2631	if (TREE_VALUE (t) == type)
2632	  return 1;
2633    }
2634  else
2635    {
2636      for (t = handled; t ; t = TREE_CHAIN (t))
2637	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2638	  return 1;
2639    }
2640
2641  return 0;
2642}
2643
2644/* A subroutine of reachable_next_level.  If we are collecting a list
2645	   of handlers, add one.  After landing pad generation, we reference
2646	   the landing pad instead of the handlers themselves; since the
2647	   handlers are all wired together, referencing one gets them all.
2648   Before landing pad generation we reference each handler individually.
2649
2650   LP_REGION contains the landing pad; REGION is the handler.  */
2651
2652static void
2653add_reachable_handler (info, lp_region, region)
2654     struct reachable_info *info;
2655     struct eh_region *lp_region;
2656     struct eh_region *region;
2657{
2658  if (! info)
2659    return;
2660
2661  if (cfun->eh->built_landing_pads)
2662    {
2663      if (! info->handlers)
2664	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2665    }
2666  else
2667    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2668}
2669
2670/* Process one level of exception regions for reachability.
2671   If TYPE_THROWN is non-null, then it is the *exact* type being
2672   propagated.  If INFO is non-null, then collect handler labels
2673   and caught/allowed type information between invocations.  */
2674
2675static enum reachable_code
2676reachable_next_level (region, type_thrown, info)
2677     struct eh_region *region;
2678     tree type_thrown;
2679     struct reachable_info *info;
2680{
2681  switch (region->type)
2682    {
2683    case ERT_CLEANUP:
2684      /* Before landing-pad generation, we model control flow
2685	 directly to the individual handlers.  In this way we can
2686	 see that catch handler types may shadow one another.  */
2687      add_reachable_handler (info, region, region);
2688      return RNL_MAYBE_CAUGHT;
2689
2690    case ERT_TRY:
2691      {
2692	struct eh_region *c;
2693	enum reachable_code ret = RNL_NOT_CAUGHT;
2694
2695	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2696	  {
2697	    /* A catch-all handler ends the search.  */
2698	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
2699	       to be run as well.  */
2700	    if (c->u.catch.type_list == NULL)
2701	      {
2702		add_reachable_handler (info, region, c);
2703		return RNL_CAUGHT;
2704	      }
2705
2706	    if (type_thrown)
2707	      {
2708		/* If we have at least one type match, end the search.  */
2709		tree tp_node = c->u.catch.type_list;
2710
2711		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2712		  {
2713		    tree type = TREE_VALUE (tp_node);
2714
2715		    if (type == type_thrown
2716			|| (lang_eh_type_covers
2717			    && (*lang_eh_type_covers) (type, type_thrown)))
2718		      {
2719			add_reachable_handler (info, region, c);
2720			return RNL_CAUGHT;
2721		      }
2722		  }
2723
2724		/* If we have definitive information of a match failure,
2725		   the catch won't trigger.  */
2726		if (lang_eh_type_covers)
2727		  return RNL_NOT_CAUGHT;
2728	      }
2729
2730	    /* At this point, we either don't know what type is thrown or
2731	       don't have front-end assistance to help deciding if it is
2732	       covered by one of the types in the list for this region.
2733
2734	       We'd then like to add this region to the list of reachable
2735	       handlers since it is indeed potentially reachable based on the
2736	       information we have.
2737
2738	       Actually, this handler is for sure not reachable if all the
2739	       types it matches have already been caught. That is, it is only
2740	       potentially reachable if at least one of the types it catches
2741	       has not been previously caught.  */
2742
2743	    if (! info)
2744	      ret = RNL_MAYBE_CAUGHT;
2745	    else
2746	      {
2747		tree tp_node = c->u.catch.type_list;
2748		bool maybe_reachable = false;
2749
2750		/* Compute the potential reachability of this handler and
2751		   update the list of types caught at the same time.  */
2752		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2753		  {
2754		    tree type = TREE_VALUE (tp_node);
2755
2756		    if (! check_handled (info->types_caught, type))
2757		      {
2758			info->types_caught
2759			  = tree_cons (NULL, type, info->types_caught);
2760
2761			maybe_reachable = true;
2762		      }
2763		  }
2764
2765		if (maybe_reachable)
2766		  {
2767		    add_reachable_handler (info, region, c);
2768
2769		    /* ??? If the catch type is a base class of every allowed
2770		       type, then we know we can stop the search.  */
2771		    ret = RNL_MAYBE_CAUGHT;
2772		  }
2773	      }
2774	  }
2775
2776	return ret;
2777      }
2778
2779    case ERT_ALLOWED_EXCEPTIONS:
2780      /* An empty list of types definitely ends the search.  */
2781      if (region->u.allowed.type_list == NULL_TREE)
2782	{
2783	  add_reachable_handler (info, region, region);
2784	  return RNL_CAUGHT;
2785	}
2786
2787      /* Collect a list of lists of allowed types for use in detecting
2788	 when a catch may be transformed into a catch-all.  */
2789      if (info)
2790	info->types_allowed = tree_cons (NULL_TREE,
2791					 region->u.allowed.type_list,
2792					 info->types_allowed);
2793
2794      /* If we have definitive information about the type hierarchy,
2795	 then we can tell if the thrown type will pass through the
2796	 filter.  */
2797      if (type_thrown && lang_eh_type_covers)
2798	{
2799	  if (check_handled (region->u.allowed.type_list, type_thrown))
2800	    return RNL_NOT_CAUGHT;
2801	  else
2802	    {
2803	      add_reachable_handler (info, region, region);
2804	      return RNL_CAUGHT;
2805	    }
2806	}
2807
2808      add_reachable_handler (info, region, region);
2809      return RNL_MAYBE_CAUGHT;
2810
2811    case ERT_CATCH:
2812	      /* Catch regions are handled by their controlling try region.  */
2813      return RNL_NOT_CAUGHT;
2814
2815    case ERT_MUST_NOT_THROW:
2816      /* Here we end our search, since no exceptions may propagate.
2817		 If we've previously touched down at some landing pad, then the
2818		 explicit function call we generated may be used.  Otherwise
2819	 the call is made by the runtime.  */
2820      if (info && info->handlers)
2821	{
2822	  add_reachable_handler (info, region, region);
2823          return RNL_CAUGHT;
2824	}
2825      else
2826	return RNL_BLOCKED;
2827
2828    case ERT_THROW:
2829    case ERT_FIXUP:
2830    case ERT_UNKNOWN:
2831      /* Shouldn't see these here.  */
2832      break;
2833    }
2834
2835  abort ();
2836}
2837
2838/* Retrieve a list of labels of exception handlers which can be
2839   reached by a given insn.  */
2840
2841rtx
2842reachable_handlers (insn)
2843     rtx insn;
2844{
2845  struct reachable_info info;
2846  struct eh_region *region;
2847  tree type_thrown;
2848  int region_number;
2849
2850  if (GET_CODE (insn) == JUMP_INSN
2851      && GET_CODE (PATTERN (insn)) == RESX)
2852    region_number = XINT (PATTERN (insn), 0);
2853  else
2854    {
2855      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2856      if (!note || INTVAL (XEXP (note, 0)) <= 0)
2857	return NULL;
2858      region_number = INTVAL (XEXP (note, 0));
2859    }
2860
2861  memset (&info, 0, sizeof (info));
2862
2863  region = cfun->eh->region_array[region_number];
2864
2865  type_thrown = NULL_TREE;
2866  if (GET_CODE (insn) == JUMP_INSN
2867      && GET_CODE (PATTERN (insn)) == RESX)
2868    {
2869      /* A RESX leaves a region instead of entering it.  Thus the
2870	 region itself may have been deleted out from under us.  */
2871      if (region == NULL)
2872	return NULL;
2873      region = region->outer;
2874    }
2875  else if (region->type == ERT_THROW)
2876    {
2877      type_thrown = region->u.throw.type;
2878      region = region->outer;
2879    }
2880
2881  for (; region; region = region->outer)
2882    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2883      break;
2884
2885  return info.handlers;
2886}
2887
2888/* Determine if the given INSN can throw an exception that is caught
2889   within the function.  */
2890
2891bool
2892can_throw_internal (insn)
2893     rtx insn;
2894{
2895  struct eh_region *region;
2896  tree type_thrown;
2897  rtx note;
2898
2899  if (! INSN_P (insn))
2900    return false;
2901
2902  if (GET_CODE (insn) == INSN
2903      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2904    insn = XVECEXP (PATTERN (insn), 0, 0);
2905
2906  if (GET_CODE (insn) == CALL_INSN
2907      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2908    {
2909      int i;
2910      for (i = 0; i < 3; ++i)
2911	{
2912	  rtx sub = XEXP (PATTERN (insn), i);
2913	  for (; sub ; sub = NEXT_INSN (sub))
2914	    if (can_throw_internal (sub))
2915	      return true;
2916	}
2917      return false;
2918    }
2919
2920  /* Every insn that might throw has an EH_REGION note.  */
2921  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2922  if (!note || INTVAL (XEXP (note, 0)) <= 0)
2923    return false;
2924
2925  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2926
2927  type_thrown = NULL_TREE;
2928  if (region->type == ERT_THROW)
2929    {
2930      type_thrown = region->u.throw.type;
2931      region = region->outer;
2932    }
2933
2934  /* If this exception is ignored by each and every containing region,
2935     then control passes straight out.  The runtime may handle some
2936     regions, which also do not require processing internally.  */
2937  for (; region; region = region->outer)
2938    {
2939      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2940      if (how == RNL_BLOCKED)
2941	return false;
2942      if (how != RNL_NOT_CAUGHT)
2943        return true;
2944    }
2945
2946  return false;
2947}
2948
2949/* Determine if the given INSN can throw an exception that is
2950   visible outside the function.  */
2951
2952bool
2953can_throw_external (insn)
2954     rtx insn;
2955{
2956  struct eh_region *region;
2957  tree type_thrown;
2958  rtx note;
2959
2960  if (! INSN_P (insn))
2961    return false;
2962
2963  if (GET_CODE (insn) == INSN
2964      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2965    insn = XVECEXP (PATTERN (insn), 0, 0);
2966
2967  if (GET_CODE (insn) == CALL_INSN
2968      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2969    {
2970      int i;
2971      for (i = 0; i < 3; ++i)
2972	{
2973	  rtx sub = XEXP (PATTERN (insn), i);
2974	  for (; sub ; sub = NEXT_INSN (sub))
2975	    if (can_throw_external (sub))
2976	      return true;
2977	}
2978      return false;
2979    }
2980
2981  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2982  if (!note)
2983    {
2984      /* Calls (and trapping insns) without notes are outside any
2985	 exception handling region in this function.  We have to
2986		 assume they might throw.  Given that the front end and middle
2987		 end mark known NOTHROW functions, this isn't so wildly
2988	 inaccurate.  */
2989      return (GET_CODE (insn) == CALL_INSN
2990	      || (flag_non_call_exceptions
2991		  && may_trap_p (PATTERN (insn))));
2992    }
2993  if (INTVAL (XEXP (note, 0)) <= 0)
2994    return false;
2995
2996  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2997
2998  type_thrown = NULL_TREE;
2999  if (region->type == ERT_THROW)
3000    {
3001      type_thrown = region->u.throw.type;
3002      region = region->outer;
3003    }
3004
3005  /* If the exception is caught or blocked by any containing region,
3006     then it is not seen by any calling function.  */
3007  for (; region ; region = region->outer)
3008    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3009      return false;
3010
3011  return true;
3012}
3013
3014/* True if nothing in this function can throw outside this function.  */
3015
3016bool
3017nothrow_function_p ()
3018{
3019  rtx insn;
3020
3021  if (! flag_exceptions)
3022    return true;
3023
3024  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3025    if (can_throw_external (insn))
3026      return false;
3027  for (insn = current_function_epilogue_delay_list; insn;
3028       insn = XEXP (insn, 1))
3029    if (can_throw_external (insn))
3030      return false;
3031
3032  return true;
3033}
3034
3035
3036/* Various hooks for unwind library.  */
3037
3038/* Do any necessary initialization to access arbitrary stack frames.
3039   On the SPARC, this means flushing the register windows.  */
3040
3041void
3042expand_builtin_unwind_init ()
3043{
3044  /* Set this so all the registers get saved in our frame; we need to be
3045     able to copy the saved values for any registers from frames we unwind.  */
3046  current_function_has_nonlocal_label = 1;
3047
3048#ifdef SETUP_FRAME_ADDRESSES
3049  SETUP_FRAME_ADDRESSES ();
3050#endif
3051}
3052
3053rtx
3054expand_builtin_eh_return_data_regno (arglist)
3055     tree arglist;
3056{
3057  tree which = TREE_VALUE (arglist);
3058  unsigned HOST_WIDE_INT iwhich;
3059
3060  if (TREE_CODE (which) != INTEGER_CST)
3061    {
3062	      error ("argument of `__builtin_eh_return_data_regno' must be constant");
3063      return constm1_rtx;
3064    }
3065
3066  iwhich = tree_low_cst (which, 1);
3067  iwhich = EH_RETURN_DATA_REGNO (iwhich);
3068  if (iwhich == INVALID_REGNUM)
3069    return constm1_rtx;
3070
3071#ifdef DWARF_FRAME_REGNUM
3072  iwhich = DWARF_FRAME_REGNUM (iwhich);
3073#else
3074  iwhich = DBX_REGISTER_NUMBER (iwhich);
3075#endif
3076
3077  return GEN_INT (iwhich);
3078}
3079
3080/* Given a value extracted from the return address register or stack slot,
3081   return the actual address encoded in that value.  */
3082
3083rtx
3084expand_builtin_extract_return_addr (addr_tree)
3085     tree addr_tree;
3086{
3087  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3088
3089  /* First mask out any unwanted bits.  */
3090#ifdef MASK_RETURN_ADDR
3091  expand_and (addr, MASK_RETURN_ADDR, addr);
3092#endif
3093
3094  /* Then adjust to find the real return address.  */
3095#if defined (RETURN_ADDR_OFFSET)
3096  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3097#endif
3098
3099  return addr;
3100}
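
/* For example, a target whose return-address register carries extra
   status bits alongside the address would define MASK_RETURN_ADDR to
   strip them, and a target where the saved value points at the call
   instruction itself would use RETURN_ADDR_OFFSET to reach the real
   return point.  Both macros are target-defined; this is merely an
   illustration of their intent.  */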
3101
3102/* Given an actual address in addr_tree, do any necessary encoding
3103   and return the value to be stored in the return address register or
3104   stack slot so the epilogue will return to that address.  */
3105
3106rtx
3107expand_builtin_frob_return_addr (addr_tree)
3108     tree addr_tree;
3109{
3110  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3111
3112#ifdef POINTERS_EXTEND_UNSIGNED
3113  if (GET_MODE (addr) != Pmode)
3114    addr = convert_memory_address (Pmode, addr);
3115#endif
3116
3117#ifdef RETURN_ADDR_OFFSET
3118  addr = force_reg (Pmode, addr);
3119  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3120#endif
3121
3122  return addr;
3123}
3124
3125/* Set up the epilogue with the magic bits we'll need to return to the
3126   exception handler.  */
3127
3128void
3129expand_builtin_eh_return (stackadj_tree, handler_tree)
3130    tree stackadj_tree, handler_tree;
3131{
3132  rtx stackadj, handler;
3133
3134  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3135  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3136
3137#ifdef POINTERS_EXTEND_UNSIGNED
3138  if (GET_MODE (stackadj) != Pmode)
3139    stackadj = convert_memory_address (Pmode, stackadj);
3140
3141  if (GET_MODE (handler) != Pmode)
3142    handler = convert_memory_address (Pmode, handler);
3143#endif
3144
3145  if (! cfun->eh->ehr_label)
3146    {
3147      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3148      cfun->eh->ehr_handler = copy_to_reg (handler);
3149      cfun->eh->ehr_label = gen_label_rtx ();
3150    }
3151  else
3152    {
3153      if (stackadj != cfun->eh->ehr_stackadj)
3154	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3155      if (handler != cfun->eh->ehr_handler)
3156	emit_move_insn (cfun->eh->ehr_handler, handler);
3157    }
3158
3159  emit_jump (cfun->eh->ehr_label);
3160}
3161
3162void
3163expand_eh_return ()
3164{
3165  rtx sa, ra, around_label;
3166
3167  if (! cfun->eh->ehr_label)
3168    return;
3169
3170  sa = EH_RETURN_STACKADJ_RTX;
3171  if (! sa)
3172    {
3173      error ("__builtin_eh_return not supported on this target");
3174      return;
3175    }
3176
3177  current_function_calls_eh_return = 1;
3178
3179  around_label = gen_label_rtx ();
3180  emit_move_insn (sa, const0_rtx);
3181  emit_jump (around_label);
3182
3183  emit_label (cfun->eh->ehr_label);
3184  clobber_return_register ();
3185
3186#ifdef HAVE_eh_return
3187  if (HAVE_eh_return)
3188    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3189  else
3190#endif
3191    {
3192      ra = EH_RETURN_HANDLER_RTX;
3193      if (! ra)
3194	{
3195	  error ("__builtin_eh_return not supported on this target");
3196	  ra = gen_reg_rtx (Pmode);
3197	}
3198
3199      emit_move_insn (sa, cfun->eh->ehr_stackadj);
3200      emit_move_insn (ra, cfun->eh->ehr_handler);
3201    }
3202
3203  emit_label (around_label);
3204}
3205
3206/* In the following functions, we represent entries in the action table
3207   as 1-based indices.  Special cases are:
3208
3209	 0:	null action record, non-null landing pad; implies cleanups
3210	-1:	null action record, null landing pad; implies no action
3211	-2:	no call-site entry; implies must_not_throw
3212	-3:	we have yet to process outer regions
3213
3214   Further, no special cases apply to the "next" field of the record.
3215   For next, 0 means end of list.  */
3216
3217struct action_record
3218{
3219  int offset;
3220  int filter;
3221  int next;
3222};
3223
3224static int
3225action_record_eq (pentry, pdata)
3226     const PTR pentry;
3227     const PTR pdata;
3228{
3229  const struct action_record *entry = (const struct action_record *) pentry;
3230  const struct action_record *data = (const struct action_record *) pdata;
3231  return entry->filter == data->filter && entry->next == data->next;
3232}
3233
3234static hashval_t
3235action_record_hash (pentry)
3236     const PTR pentry;
3237{
3238  const struct action_record *entry = (const struct action_record *) pentry;
3239  return entry->next * 1009 + entry->filter;
3240}
3241
3242static int
3243add_action_record (ar_hash, filter, next)
3244     htab_t ar_hash;
3245     int filter, next;
3246{
3247  struct action_record **slot, *new, tmp;
3248
3249  tmp.filter = filter;
3250  tmp.next = next;
3251  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3252
3253  if ((new = *slot) == NULL)
3254    {
3255      new = (struct action_record *) xmalloc (sizeof (*new));
3256      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3257      new->filter = filter;
3258      new->next = next;
3259      *slot = new;
3260
3261      /* The filter value goes in untouched.  The link to the next
3262	 record is a "self-relative" byte offset, or zero to indicate
3263		 that there is no next record.  So convert the absolute 1-based
3264	 indices we've been carrying around into a displacement.  */
3265
3266      push_sleb128 (&cfun->eh->action_record_data, filter);
3267      if (next)
3268	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3269      push_sleb128 (&cfun->eh->action_record_data, next);
3270    }
3271
3272  return new->offset;
3273}
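
/* A worked example of the displacement conversion (values invented):
   starting from an empty varray, adding a record with filter 1 and no
   next record assigns it offset 1 and pushes the bytes 01 00.  Adding
   a second record with filter 0 that links to the first assigns offset
   3; after its filter byte is pushed the varray holds 3 bytes, so the
   stored link is 1 - (3 + 1) = -3, encoded as the single byte 7d.  */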
3274
3275static int
3276collect_one_action_chain (ar_hash, region)
3277     htab_t ar_hash;
3278     struct eh_region *region;
3279{
3280  struct eh_region *c;
3281  int next;
3282
3283  /* If we've reached the top of the region chain, then we have
3284     no actions, and require no landing pad.  */
3285  if (region == NULL)
3286    return -1;
3287
3288  switch (region->type)
3289    {
3290    case ERT_CLEANUP:
3291      /* A cleanup adds a zero filter to the beginning of the chain, but
3292	 there are special cases to look out for.  If there are *only*
3293	 cleanups along a path, then it compresses to a zero action.
3294	 Further, if there are multiple cleanups along a path, we only
3295	 need to represent one of them, as that is enough to trigger
3296	 entry to the landing pad at runtime.  */
3297      next = collect_one_action_chain (ar_hash, region->outer);
3298      if (next <= 0)
3299	return 0;
3300      for (c = region->outer; c ; c = c->outer)
3301	if (c->type == ERT_CLEANUP)
3302	  return next;
3303      return add_action_record (ar_hash, 0, next);
3304
3305    case ERT_TRY:
3306      /* Process the associated catch regions in reverse order.
3307	 If there's a catch-all handler, then we don't need to
3308	 search outer regions.  Use a magic -3 value to record
3309	 that we haven't done the outer search.  */
3310      next = -3;
3311      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3312	{
3313	  if (c->u.catch.type_list == NULL)
3314	    {
3315	      /* Retrieve the filter from the head of the filter list
3316		 where we have stored it (see assign_filter_values).  */
3317	      int filter
3318		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3319
3320	      next = add_action_record (ar_hash, filter, 0);
3321	    }
3322	  else
3323	    {
3324	      /* Once the outer search is done, trigger an action record for
3325                 each filter we have.  */
3326	      tree flt_node;
3327
3328	      if (next == -3)
3329		{
3330		  next = collect_one_action_chain (ar_hash, region->outer);
3331
3332		  /* If there is no next action, terminate the chain.  */
3333		  if (next == -1)
3334		    next = 0;
3335		  /* If all outer actions are cleanups or must_not_throw,
3336		     we'll have no action record for them, since we want
3337		     to encode those states directly in the call-site record.
3338		     Add a cleanup action to the chain to catch these.  */
3339		  else if (next <= 0)
3340		    next = add_action_record (ar_hash, 0, 0);
3341		}
3342
3343	      flt_node = c->u.catch.filter_list;
3344	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3345		{
3346		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3347		  next = add_action_record (ar_hash, filter, next);
3348		}
3349	    }
3350	}
3351      return next;
3352
3353    case ERT_ALLOWED_EXCEPTIONS:
3354      /* An exception specification adds its filter to the
3355	 beginning of the chain.  */
3356      next = collect_one_action_chain (ar_hash, region->outer);
3357      return add_action_record (ar_hash, region->u.allowed.filter,
3358				next < 0 ? 0 : next);
3359
3360    case ERT_MUST_NOT_THROW:
3361      /* A must-not-throw region with no inner handlers or cleanups
3362	 requires no call-site entry.  Note that this differs from
3363	 the no handler or cleanup case in that we do require an lsda
3364	 to be generated.  Return a magic -2 value to record this.  */
3365      return -2;
3366
3367    case ERT_CATCH:
3368    case ERT_THROW:
3369      /* CATCH regions are handled in TRY above.  THROW regions are
3370	 for optimization information only and produce no output.  */
3371      return collect_one_action_chain (ar_hash, region->outer);
3372
3373    default:
3374      abort ();
3375    }
3376}
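
/* By way of example (a sketch, not actual compiler output): for a call
   inside a try block whose single catch has filter 1, nested within an
   outer cleanup, the ERT_TRY case first collects the outer chain.  The
   lone cleanup compresses to a zero action, so a (filter 0, next 0)
   record is added to stand for it, and the catch then contributes a
   (filter 1) record linked to that one -- the runtime tests the type
   first and falls back to the cleanup.  */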
3377
3378static int
3379add_call_site (landing_pad, action)
3380     rtx landing_pad;
3381     int action;
3382{
3383  struct call_site_record *data = cfun->eh->call_site_data;
3384  int used = cfun->eh->call_site_data_used;
3385  int size = cfun->eh->call_site_data_size;
3386
3387  if (used >= size)
3388    {
3389      size = (size ? size * 2 : 64);
3390      data = (struct call_site_record *)
3391	xrealloc (data, sizeof (*data) * size);
3392      cfun->eh->call_site_data = data;
3393      cfun->eh->call_site_data_size = size;
3394    }
3395
3396  data[used].landing_pad = landing_pad;
3397  data[used].action = action;
3398
3399  cfun->eh->call_site_data_used = used + 1;
3400
3401  return used + call_site_base;
3402}
3403
3404/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3405   The new note numbers will not refer to region numbers, but
3406   instead to call site entries.  */
3407
3408void
3409convert_to_eh_region_ranges ()
3410{
3411  rtx insn, iter, note;
3412  htab_t ar_hash;
3413  int last_action = -3;
3414  rtx last_action_insn = NULL_RTX;
3415  rtx last_landing_pad = NULL_RTX;
3416  rtx first_no_action_insn = NULL_RTX;
3417  int call_site = 0;
3418
3419  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3420    return;
3421
3422  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3423
3424  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3425
3426  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3427    if (INSN_P (iter))
3428      {
3429	struct eh_region *region;
3430	int this_action;
3431	rtx this_landing_pad;
3432
3433	insn = iter;
3434	if (GET_CODE (insn) == INSN
3435	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3436	  insn = XVECEXP (PATTERN (insn), 0, 0);
3437
3438	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3439	if (!note)
3440	  {
3441	    if (! (GET_CODE (insn) == CALL_INSN
3442		   || (flag_non_call_exceptions
3443		       && may_trap_p (PATTERN (insn)))))
3444	      continue;
3445	    this_action = -1;
3446	    region = NULL;
3447	  }
3448	else
3449	  {
3450	    if (INTVAL (XEXP (note, 0)) <= 0)
3451	      continue;
3452	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3453	    this_action = collect_one_action_chain (ar_hash, region);
3454	  }
3455
3456	/* The existence of catch handlers or must-not-throw regions
3457	   implies that an lsda is needed (even if empty).  */
3458	if (this_action != -1)
3459	  cfun->uses_eh_lsda = 1;
3460
3461	/* Delay creation of region notes for no-action regions
3462	   until we're sure that an lsda will be required.  */
3463	else if (last_action == -3)
3464	  {
3465	    first_no_action_insn = iter;
3466	    last_action = -1;
3467	  }
3468
3469	/* Cleanups and handlers may share action chains but not
3470	   landing pads.  Collect the landing pad for this region.  */
3471	if (this_action >= 0)
3472	  {
3473	    struct eh_region *o;
3474	    for (o = region; ! o->landing_pad ; o = o->outer)
3475	      continue;
3476	    this_landing_pad = o->landing_pad;
3477	  }
3478	else
3479	  this_landing_pad = NULL_RTX;
3480
3481	/* Differing actions or landing pads imply a change in call-site
3482	   info, which implies some EH_REGION note should be emitted.  */
3483	if (last_action != this_action
3484	    || last_landing_pad != this_landing_pad)
3485	  {
3486	    /* If we'd not seen a previous action (-3) or the previous
3487	       action was must-not-throw (-2), then we do not need an
3488	       end note.  */
3489	    if (last_action >= -1)
3490	      {
3491		/* If we delayed the creation of the begin, do it now.  */
3492		if (first_no_action_insn)
3493		  {
3494		    call_site = add_call_site (NULL_RTX, 0);
3495		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3496					     first_no_action_insn);
3497		    NOTE_EH_HANDLER (note) = call_site;
3498		    first_no_action_insn = NULL_RTX;
3499		  }
3500
3501		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3502					last_action_insn);
3503		NOTE_EH_HANDLER (note) = call_site;
3504	      }
3505
3506	    /* If the new action is must-not-throw, then no region notes
3507	       are created.  */
3508	    if (this_action >= -1)
3509	      {
3510		call_site = add_call_site (this_landing_pad,
3511					   this_action < 0 ? 0 : this_action);
3512		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3513		NOTE_EH_HANDLER (note) = call_site;
3514	      }
3515
3516	    last_action = this_action;
3517	    last_landing_pad = this_landing_pad;
3518	  }
3519	last_action_insn = iter;
3520      }
3521
3522  if (last_action >= -1 && ! first_no_action_insn)
3523    {
3524      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3525      NOTE_EH_HANDLER (note) = call_site;
3526    }
3527
3528  htab_delete (ar_hash);
3529}
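
/* Sketch of the transformation (the call-site number is invented): a
   run of insns sharing one action chain and landing pad becomes

	NOTE_INSN_EH_REGION_BEG 3
	  call foo ();
	  call bar ();
	NOTE_INSN_EH_REGION_END 3

   where 3 is the call-site index; final output emits the LEHB3/LEHE3
   labels referenced by the call-site table at these notes.  */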
3530
3531
3532static void
3533push_uleb128 (data_area, value)
3534     varray_type *data_area;
3535     unsigned int value;
3536{
3537  do
3538    {
3539      unsigned char byte = value & 0x7f;
3540      value >>= 7;
3541      if (value)
3542	byte |= 0x80;
3543      VARRAY_PUSH_UCHAR (*data_area, byte);
3544    }
3545  while (value);
3546}
3547
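
/* Example: 624485 (0x98765) is encoded as the bytes e5 8e 26 -- each
   byte carries the next least significant seven bits, with the high
   bit set on all but the final byte.  */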
3548static void
3549push_sleb128 (data_area, value)
3550     varray_type *data_area;
3551     int value;
3552{
3553  unsigned char byte;
3554  int more;
3555
3556  do
3557    {
3558      byte = value & 0x7f;
3559      value >>= 7;
3560      more = ! ((value == 0 && (byte & 0x40) == 0)
3561		|| (value == -1 && (byte & 0x40) != 0));
3562      if (more)
3563	byte |= 0x80;
3564      VARRAY_PUSH_UCHAR (*data_area, byte);
3565    }
3566  while (more);
3567}
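
/* Examples: -2 encodes as the single byte 7e, since after one 7-bit
   group the remaining value is -1 and the sign bit (0x40) of the group
   is set.  64 needs two bytes, c0 00: its first group has bit 0x40
   set, so an extra group is required to mark the value as positive.  */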
3568
3569
3570#ifndef HAVE_AS_LEB128
3571static int
3572dw2_size_of_call_site_table ()
3573{
3574  int n = cfun->eh->call_site_data_used;
3575  int size = n * (4 + 4 + 4);
3576  int i;
3577
3578  for (i = 0; i < n; ++i)
3579    {
3580      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3581      size += size_of_uleb128 (cs->action);
3582    }
3583
3584  return size;
3585}
3586
3587static int
3588sjlj_size_of_call_site_table ()
3589{
3590  int n = cfun->eh->call_site_data_used;
3591  int size = 0;
3592  int i;
3593
3594  for (i = 0; i < n; ++i)
3595    {
3596      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3597      size += size_of_uleb128 (INTVAL (cs->landing_pad));
3598      size += size_of_uleb128 (cs->action);
3599    }
3600
3601  return size;
3602}
3603#endif
3604
3605static void
3606dw2_output_call_site_table ()
3607{
3608  const char *const function_start_lab
3609    = IDENTIFIER_POINTER (current_function_func_begin_label);
3610  int n = cfun->eh->call_site_data_used;
3611  int i;
3612
3613  for (i = 0; i < n; ++i)
3614    {
3615      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3616      char reg_start_lab[32];
3617      char reg_end_lab[32];
3618      char landing_pad_lab[32];
3619
3620      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3621      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3622
3623      if (cs->landing_pad)
3624	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3625				     CODE_LABEL_NUMBER (cs->landing_pad));
3626
3627      /* ??? Perhaps use insn length scaling if the assembler supports
3628	 generic arithmetic.  */
3629      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3630	 data4 if the function is small enough.  */
3631#ifdef HAVE_AS_LEB128
3632      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3633				    "region %d start", i);
3634      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3635				    "length");
3636      if (cs->landing_pad)
3637	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3638				      "landing pad");
3639      else
3640	dw2_asm_output_data_uleb128 (0, "landing pad");
3641#else
3642      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3643			    "region %d start", i);
3644      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3645      if (cs->landing_pad)
3646	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3647			      "landing pad");
3648      else
3649	dw2_asm_output_data (4, 0, "landing pad");
3650#endif
3651      dw2_asm_output_data_uleb128 (cs->action, "action");
3652    }
3653
3654  call_site_base += n;
3655}
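
/* Assuming an assembler with leb128 support, one row of the table
   emitted above comes out roughly as follows (an illustrative sketch;
   the labels follow the LEHB/LEHE/L patterns generated above):

	.uleb128 LEHB42-<function start>	-- region start
	.uleb128 LEHE42-LEHB42			-- length
	.uleb128 L99-<function start>		-- landing pad, or 0
	.uleb128 <action>  */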
3656
3657static void
3658sjlj_output_call_site_table ()
3659{
3660  int n = cfun->eh->call_site_data_used;
3661  int i;
3662
3663  for (i = 0; i < n; ++i)
3664    {
3665      struct call_site_record *cs = &cfun->eh->call_site_data[i];
3666
3667      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3668				   "region %d landing pad", i);
3669      dw2_asm_output_data_uleb128 (cs->action, "action");
3670    }
3671
3672  call_site_base += n;
3673}
3674
3675void
3676output_function_exception_table ()
3677{
3678  int tt_format, cs_format, lp_format, i, n;
3679#ifdef HAVE_AS_LEB128
3680  char ttype_label[32];
3681  char cs_after_size_label[32];
3682  char cs_end_label[32];
3683#else
3684  int call_site_len;
3685#endif
3686  int have_tt_data;
3687  int funcdef_number;
3688  int tt_format_size = 0;
3689
3690  /* Not all functions need anything.  */
3691  if (! cfun->uses_eh_lsda)
3692    return;
3693
3694  funcdef_number = (USING_SJLJ_EXCEPTIONS
3695		    ? sjlj_funcdef_number
3696		    : current_funcdef_number);
3697
3698#ifdef IA64_UNWIND_INFO
3699  fputs ("\t.personality\t", asm_out_file);
3700  output_addr_const (asm_out_file, eh_personality_libfunc);
3701  fputs ("\n\t.handlerdata\n", asm_out_file);
3702  /* Note that varasm still thinks we're in the function's code section.
3703     The ".endp" directive that will immediately follow will take us back.  */
3704#else
3705  (*targetm.asm_out.exception_section) ();
3706#endif
3707
3708  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3709		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3710
3711  /* Indicate the format of the @TType entries.  */
3712  if (! have_tt_data)
3713    tt_format = DW_EH_PE_omit;
3714  else
3715    {
3716      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3717#ifdef HAVE_AS_LEB128
3718      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3719#endif
3720      tt_format_size = size_of_encoded_value (tt_format);
3721
3722      assemble_align (tt_format_size * BITS_PER_UNIT);
3723    }
3724
3725  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3726
3727  /* The LSDA header.  */
3728
3729  /* Indicate the format of the landing pad start pointer.  An omitted
3730     field implies @LPStart == @Start.  */
3731  /* Currently we always put @LPStart == @Start.  This field would
3732     be most useful in moving the landing pads completely out of
3733     line to another section, but it could also be used to minimize
3734     the size of uleb128 landing pad offsets.  */
3735  lp_format = DW_EH_PE_omit;
3736  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3737		       eh_data_format_name (lp_format));
3738
3739  /* @LPStart pointer would go here.  */
3740
3741  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3742		       eh_data_format_name (tt_format));
3743
3744#ifndef HAVE_AS_LEB128
3745  if (USING_SJLJ_EXCEPTIONS)
3746    call_site_len = sjlj_size_of_call_site_table ();
3747  else
3748    call_site_len = dw2_size_of_call_site_table ();
3749#endif
3750
3751	  /* A uleb128 displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
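      /* The @TType base offset is itself a uleb128, so its encoded
	 size depends on its value, while its value must place the end
	 of the @TType table on a TT_FORMAT_SIZE boundary and so
	 depends on its size.  Iterate until the estimate is stable.  */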
      unsigned int before_disp, after_disp, last_disp, disp;

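      /* BEFORE_DISP counts the @LPStart and @TType format bytes that
	 precede this field; AFTER_DISP counts everything from the
	 call-site format byte through the end of the @TType table.  */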
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

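  /* With leb128 support, the call-site table length is emitted as a
     label difference the assembler resolves; otherwise emit the
     length computed earlier.  */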
#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

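  /* The @TType table is emitted in reverse order because the runtime
     indexes it backward from its end: the entry for filter value N
     sits N entries before the label that follows the table.  */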
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

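      /* A null TYPE denotes a catch-all handler; it is represented in
	 the @TType table by a zero entry.  */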
      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

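  /* Switch back to the function's code section, undoing the move to
     the exception section made above.  */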
  function_section (current_function_decl);

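  /* Advance the private SJLJ LSDA counter now that this function's
     table has been emitted.  */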
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}
