/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"


/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_CLEANUP = 1,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the type object
       matched, and a pointer to the generated code.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type;
      int filter;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
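
/* As an illustrative sketch (not a normative description): a source
   fragment such as

     try { A(); } catch (T) { B(); }

   yields an ERT_TRY region covering the expansion of A() and an
   ERT_CATCH region for T, linked to the try through u.try.catch and
   u.catch.next_catch, with B() emitted under the catch region's
   label.  */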

/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

\f
static void mark_eh_region                  PARAMS ((struct eh_region *));

static int t2r_eq                           PARAMS ((const PTR,
                                                     const PTR));
static hashval_t t2r_hash                   PARAMS ((const PTR));
static int t2r_mark_1                       PARAMS ((PTR *, PTR));
static void t2r_mark                        PARAMS ((PTR));
static void add_type_for_runtime            PARAMS ((tree));
static tree lookup_type_for_runtime         PARAMS ((tree));

static struct eh_region *expand_eh_region_end  PARAMS ((void));

static rtx get_exception_filter             PARAMS ((struct function *));

static void collect_eh_region_array         PARAMS ((void));
static void resolve_fixup_regions           PARAMS ((void));
static void remove_fixup_regions            PARAMS ((void));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
                                                        struct inline_remap *));
static void duplicate_eh_region_2           PARAMS ((struct eh_region *,
                                                     struct eh_region **));
static int ttypes_filter_eq                 PARAMS ((const PTR,
                                                     const PTR));
static hashval_t ttypes_filter_hash         PARAMS ((const PTR));
static int ehspec_filter_eq                 PARAMS ((const PTR,
                                                     const PTR));
static hashval_t ehspec_filter_hash         PARAMS ((const PTR));
static int add_ttypes_entry                 PARAMS ((htab_t, tree));
static int add_ehspec_entry                 PARAMS ((htab_t, htab_t,
                                                     tree));
static void assign_filter_values            PARAMS ((void));
static void build_post_landing_pads         PARAMS ((void));
static void connect_post_landing_pads       PARAMS ((void));
static void dw2_build_landing_pads          PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter        PARAMS ((rtx));
static void sjlj_emit_function_exit         PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads         PARAMS ((void));

static void remove_exception_handler_label  PARAMS ((rtx));
static void remove_eh_handler               PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled                    PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
              struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq                 PARAMS ((const PTR,
                                                     const PTR));
static hashval_t action_record_hash         PARAMS ((const PTR));
static int add_action_record                PARAMS ((htab_t, int, int));
static int collect_one_action_chain         PARAMS ((htab_t,
                                                     struct eh_region *));
static int add_call_site                    PARAMS ((rtx, int));

static void push_uleb128                    PARAMS ((varray_type *,
                                                     unsigned int));
static void push_sleb128                    PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table      PARAMS ((void));
static int sjlj_size_of_call_site_table     PARAMS ((void));
#endif
static void dw2_output_call_site_table      PARAMS ((void));
static void sjlj_output_call_site_table     PARAMS ((void));

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
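  /* For reference, a rough sketch of that structure's C layout as
     implied by the fields built below (an assumption for illustration
     only; unwind-sjlj.c is authoritative):

        struct SjLj_Function_Context
        {
          struct SjLj_Function_Context *prev;
          int call_site;
          _Unwind_Word data[4];
          void *personality, *lsda;
          void *jbuf[];      -- length is target-dependent
        };  */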
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
         via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
                          / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}

/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
        {
          struct eh_region *r = eh->region_array[i];
          if (r && r->region_number == i)
            mark_eh_region (r);
        }
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
        {
          mark_eh_region (r);
          if (r->inner)
            r = r->inner;
          else if (r->next_peer)
            r = r->next_peer;
          else
            {
              do {
                r = r->outer;
                if (r == NULL)
                  goto tree_done;
              } while (r->next_peer == NULL);
              r = r->next_peer;
            }
        }
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
        ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
        {
          struct eh_region *r = eh->region_array[i];
          /* Mind we don't free a region struct more than once.  */
          if (r && r->region_number == i)
            free (r);
        }
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
        {
          if (r->inner)
            r = r->inner;
          else if (r->next_peer)
            {
              next = r->next_peer;
              free (r);
              r = next;
            }
          else
            {
              do {
                next = r->outer;
                free (r);
                r = next;
                if (r == NULL)
                  goto tree_done;
              } while (r->next_peer == NULL);
              next = r->next_peer;
              free (r);
              r = next;
            }
        }
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}

\f
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
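
/* Taken together, the two routines above bracket a region: a front
   end calls expand_eh_region_start, expands the protected statements,
   then calls one of the expand_eh_region_end_* variants below.  A
   minimal sketch (mirroring what add_partial_entry does):

     expand_eh_region_start ();
     ... expand protected statements ...
     expand_eh_region_end_cleanup (cleanup_expr);  */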

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, or null if this is
   a catch-all clause.  */

void
expand_start_catch (type)
     tree type;
{
  struct eh_region *t, *c, *l;

  if (! doing_eh (0))
    return;

  if (type)
    add_type_for_runtime (type);
  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type = type;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
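
/* For illustration (an assumed front-end scenario): in C++ code such as

     { T obj; if (p) goto out; }  out:;

   the goto expands obj's destructor inline at the jump.  Wrapping that
   expansion in a fixup region keeps the enclosing cleanup region from
   running the destructor a second time.  */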

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
\f
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit the call to
     begin_protect_partials for the outermost region.  So,
     PROTECT_LIST may be NULL.  */
  if (!cfun->eh->protect_list)
    return;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
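
/* A sketch of typical usage of the partials interface (illustrative,
   not taken from a particular front end):

     begin_protect_partials ();
     add_partial_entry (cleanup_a);
     add_partial_entry (cleanup_b);
     ... expand statements needing both cleanups ...
     end_protect_partials ();

   Each add_partial_entry opens a region; end_protect_partials closes
   them all, turning each pushed handler into a cleanup region.  */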

\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
        continue;

      for (j = 1; j <= n; ++j)
        {
          cleanup = cfun->eh->region_array[j];
          if (cleanup->type == ERT_CLEANUP
              && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
            break;
        }
      if (j > n)
        abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (note = find_reg_note (insn, REG_EH_REGION, NULL))
        && INTVAL (XEXP (note, 0)) > 0
        && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
        && fixup->type == ERT_FIXUP)
      {
        if (fixup->u.fixup.real_region)
          /* Update the region number in the note's value slot; the
             next-note link lives in XEXP (note, 1).  */
          XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
        else
          remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
        continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
        fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
        continue;

      if (fixup->inner)
        {
          struct eh_region *parent, *p, **pp;

          parent = fixup->u.fixup.real_region;

          /* Fix up the children's parent pointers; find the end of
             the list.  */
          for (p = fixup->inner; ; p = p->next_peer)
            {
              p->outer = parent;
              if (! p->next_peer)
                break;
            }

          /* In the tree of cleanups, only outer-inner ordering matters.
             So link the children back in anywhere at the correct level.  */
          if (parent)
            pp = &parent->inner;
          else
            pp = &cfun->eh->region_tree;
          p->next_peer = *pp;
          *pp = fixup->inner;
          fixup->inner = NULL;
        }

      remove_eh_handler (fixup);
    }
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        {
          int kind = NOTE_LINE_NUMBER (insn);
          if (kind == NOTE_INSN_EH_REGION_BEG
              || kind == NOTE_INSN_EH_REGION_END)
            {
              if (kind == NOTE_INSN_EH_REGION_BEG)
                {
                  struct eh_region *r;

                  *sp++ = cur;
                  cur = NOTE_EH_HANDLER (insn);

                  r = cfun->eh->region_array[cur];
                  if (r->type == ERT_FIXUP)
                    {
                      r = r->u.fixup.real_region;
                      cur = r ? r->region_number : 0;
                    }
                  else if (r->type == ERT_CATCH)
                    {
                      r = r->outer;
                      cur = r ? r->region_number : 0;
                    }
                }
              else
                cur = *--sp;

              /* Removing the first insn of a CALL_PLACEHOLDER sequence
                 requires extra care to adjust sequence start.  */
              if (insn == *pinsns)
                *pinsns = next;
              remove_insn (insn);
              continue;
            }
        }
      else if (INSN_P (insn))
        {
          if (cur > 0
              && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* Calls can always potentially throw exceptions, unless
                 they have a REG_EH_REGION note with a value of 0 or less.
                 Which should be the only possible kind so far.  */
              && (GET_CODE (insn) == CALL_INSN
                  /* If we wanted exceptions for non-call insns, then
                     any may_trap_p instruction could throw.  */
                  || (flag_non_call_exceptions
                      && may_trap_p (PATTERN (insn)))))
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
                                                  REG_NOTES (insn));
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
                                               sp, cur);
            }
        }
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
}

void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}

\f
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
        n->u.try.continue_label
          = get_label_from_map (map,
                                CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type = o->u.catch.type;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
        abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
        if (n_array[i] && n_array[i]->outer == NULL)
          n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}

\f
/* ??? Move from tree.c to tree.h.  */
#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)

static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *) addr, t2r_mark_1, NULL);
}

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
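
/* A worked example of the above (no additional semantics): the first
   distinct type added receives filter 1 and sits in slot 0 of
   ttype_data, the second receives filter 2, and re-adding the first
   type simply returns 1 again.  */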

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
         value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
        push_uleb128 (&cfun->eh->ehspec_data,
                      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
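
/* Recall that uleb128 stores seven bits per byte, least significant
   group first, with the high bit set on every byte except the last;
   e.g. the value 130 encodes as the two bytes 0x82 0x01, while values
   below 128 occupy a single byte.  So most ehspec entries cost one
   byte per type plus the terminating 0.  */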

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
             all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
              {
                /* ??? _Unwind_ForcedUnwind wants no match here.  */
                if (c->u.catch.type == NULL)
                  emit_jump (c->label);
                else
                  emit_cmp_and_jump_insns (cfun->eh->filter,
                                           GEN_INT (c->u.catch.filter),
                                           EQ, NULL_RTX, word_mode,
                                           0, 0, c->label);
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_insns_before (seq, region->u.try.catch->label);
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (cfun->eh->filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX, word_mode, 0, 0,
                                   region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_insns_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          abort ();
        }
    }
}

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        emit_jump (outer->post_landing_pad);
      else
        emit_library_call (unwind_resume_libfunc, LCT_THROW,
                           VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      flow_delete_insn (region->resume);
    }
}

\f
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
        emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
        { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
         won't have considered them clobbered from the call that
         threw.  Kill them now.  */
      for (j = 0; ; ++j)
        {
          unsigned r = EH_RETURN_DATA_REGNO (j);
          if (r == INVALID_REGNUM)
            break;
          if (! call_used_regs[r])
            emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
        }

      emit_move_insn (cfun->eh->exc_ptr,
                      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
                      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}

\f
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      for (; region; region = region->outer)
        if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
          break;

      if (region)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = cfun->eh->region_array[i];
        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable
        && lp_info[i].action_index >= 0)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}
2033
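/* Before each insn that can throw, emit a store of its call-site
   value into the function context, so the runtime can tell which
   region was active when an exception happened.  Stores that would
   merely repeat the value already current within the extended basic
   block are omitted.  */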
2034 static void
2035 sjlj_mark_call_sites (lp_info)
2036 struct sjlj_lp_info *lp_info;
2037 {
2038 int last_call_site = -2;
2039 rtx insn, mem;
2040
2041 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2042 sjlj_fc_call_site_ofs);
2043
2044 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2045 {
2046 struct eh_region *region;
2047 int this_call_site;
2048 rtx note, before, p;
2049
2050 /* Reset value tracking at extended basic block boundaries. */
2051 if (GET_CODE (insn) == CODE_LABEL)
2052 last_call_site = -2;
2053
2054 if (! INSN_P (insn))
2055 continue;
2056
2057 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2058 if (!note)
2059 {
2060 /* Calls (and trapping insns) without notes are outside any
2061 exception handling region in this function. Mark them as
2062 no action. */
2063 if (GET_CODE (insn) == CALL_INSN
2064 || (flag_non_call_exceptions
2065 && may_trap_p (PATTERN (insn))))
2066 this_call_site = -1;
2067 else
2068 continue;
2069 }
2070 else
2071 {
2072 /* Calls that are known not to throw need not be marked. */
2073 if (INTVAL (XEXP (note, 0)) <= 0)
2074 continue;
2075
2076 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2077 this_call_site = lp_info[region->region_number].call_site_index;
2078 }
2079
2080 if (this_call_site == last_call_site)
2081 continue;
2082
2083 /* Don't separate a call from its argument loads. */
2084 before = insn;
2085 if (GET_CODE (insn) == CALL_INSN)
2086 before = find_first_parameter_load (insn, NULL_RTX);
2087
2088 start_sequence ();
2089 emit_move_insn (mem, GEN_INT (this_call_site));
2090 p = get_insns ();
2091 end_sequence ();
2092
2093 emit_insns_before (p, before);
2094 last_call_site = this_call_site;
2095 }
2096 }
2097
2098 /* Construct the SjLj_Function_Context. */
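/* Roughly, and glossing over the rtx details, the sequence emitted
   below behaves like

       fc.personality = <the personality routine>;
       fc.lsda = &LLSDAn;          or 0 when this function has no lsda
       if (setjmp (fc.jbuf))       builtin or library setjmp
         goto dispatch_label;
       _Unwind_SjLj_Register (&fc);

   where the register call is whatever routine
   unwind_sjlj_register_libfunc names.  */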
2099
2100 static void
2101 sjlj_emit_function_enter (dispatch_label)
2102 rtx dispatch_label;
2103 {
2104 rtx fn_begin, fc, mem, seq;
2105
2106 fc = cfun->eh->sjlj_fc;
2107
2108 start_sequence ();
2109
2110 /* We're storing this libcall's address into memory instead of
2111 calling it directly. Thus, we must call assemble_external_libcall
2112 here, as we cannot depend on emit_library_call to do it for us. */
2113 assemble_external_libcall (eh_personality_libfunc);
2114 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2115 emit_move_insn (mem, eh_personality_libfunc);
2116
2117 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2118 if (cfun->uses_eh_lsda)
2119 {
2120 char buf[20];
2121 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2122 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2123 }
2124 else
2125 emit_move_insn (mem, const0_rtx);
2126
2127 #ifdef DONT_USE_BUILTIN_SETJMP
2128 {
2129 rtx x, note;
2130 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2131 TYPE_MODE (integer_type_node), 1,
2132 plus_constant (XEXP (fc, 0),
2133 sjlj_fc_jbuf_ofs), Pmode);
2134
2135 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2136 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2137
2138 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2139 TYPE_MODE (integer_type_node), 0, 0,
2140 dispatch_label);
2141 }
2142 #else
2143 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2144 dispatch_label);
2145 #endif
2146
2147 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2148 1, XEXP (fc, 0), Pmode);
2149
2150 seq = get_insns ();
2151 end_sequence ();
2152
2153 /* ??? Instead of doing this at the beginning of the function,
2154 do this in a block that is at loop level 0 and dominates all
2155 can_throw_internal instructions. */
2156
2157 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2158 if (GET_CODE (fn_begin) == NOTE
2159 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2160 break;
2161 emit_insns_after (seq, fn_begin);
2162 }
2163
2164 /* Call back from expand_function_end to know where we should put
2165 the call to unwind_sjlj_unregister_libfunc if needed. */
2166
2167 void
2168 sjlj_emit_function_exit_after (after)
2169 rtx after;
2170 {
2171 cfun->eh->sjlj_exit_after = after;
2172 }
2173
2174 static void
2175 sjlj_emit_function_exit ()
2176 {
2177 rtx seq;
2178
2179 start_sequence ();
2180
2181 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2182 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2183
2184 seq = get_insns ();
2185 end_sequence ();
2186
2187 /* ??? Really this can be done in any block at loop level 0 that
2188 post-dominates all can_throw_internal instructions. This is
2189 the last possible moment. */
2190
2191 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2192 }
2193
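/* Emit the single dispatch point for the function.  Schematically
   the code produced below is

       dispatch_label:
         dispatch = fc.call_site;
         exc_ptr = fc.data[0];
         filter = fc.data[1];
         if (dispatch == <index 1>) goto <post-landing pad 1>;
         ...

   with the first reachable region's post-landing pad reached by
   falling through, since the whole sequence is emitted just before
   it.  */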
2194 static void
2195 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2196 rtx dispatch_label;
2197 struct sjlj_lp_info *lp_info;
2198 {
2199 int i, first_reachable;
2200 rtx mem, dispatch, seq, fc;
2201
2202 fc = cfun->eh->sjlj_fc;
2203
2204 start_sequence ();
2205
2206 emit_label (dispatch_label);
2207
2208 #ifndef DONT_USE_BUILTIN_SETJMP
2209 expand_builtin_setjmp_receiver (dispatch_label);
2210 #endif
2211
2212 /* Load up dispatch index, exc_ptr and filter values from the
2213 function context. */
2214 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2215 sjlj_fc_call_site_ofs);
2216 dispatch = copy_to_reg (mem);
2217
2218 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2219 if (word_mode != Pmode)
2220 {
2221 #ifdef POINTERS_EXTEND_UNSIGNED
2222 mem = convert_memory_address (Pmode, mem);
2223 #else
2224 mem = convert_to_mode (Pmode, mem, 0);
2225 #endif
2226 }
2227 emit_move_insn (cfun->eh->exc_ptr, mem);
2228
2229 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2230 emit_move_insn (cfun->eh->filter, mem);
2231
2232 /* Jump to one of the directly reachable regions. */
2233 /* ??? This really ought to be using a switch statement. */
2234
2235 first_reachable = 0;
2236 for (i = cfun->eh->last_region_number; i > 0; --i)
2237 {
2238 if (! lp_info[i].directly_reachable
2239 || lp_info[i].action_index < 0)
2240 continue;
2241
2242 if (! first_reachable)
2243 {
2244 first_reachable = i;
2245 continue;
2246 }
2247
2248 emit_cmp_and_jump_insns (dispatch,
2249 GEN_INT (lp_info[i].dispatch_index), EQ,
2250 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2251 cfun->eh->region_array[i]->post_landing_pad);
2252 }
2253
2254 seq = get_insns ();
2255 end_sequence ();
2256
2257 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2258 ->post_landing_pad));
2259 }
2260
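/* The sjlj analogue of dw2_build_landing_pads: find the regions a
   throw can reach, allocate the function context, assign and mark
   call-site values, and emit the context setup, the dispatch code
   and the unregister call.  */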
2261 static void
2262 sjlj_build_landing_pads ()
2263 {
2264 struct sjlj_lp_info *lp_info;
2265
2266 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2267 sizeof (struct sjlj_lp_info));
2268
2269 if (sjlj_find_directly_reachable_regions (lp_info))
2270 {
2271 rtx dispatch_label = gen_label_rtx ();
2272
2273 cfun->eh->sjlj_fc
2274 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2275 int_size_in_bytes (sjlj_fc_type_node),
2276 TYPE_ALIGN (sjlj_fc_type_node));
2277
2278 sjlj_assign_call_site_values (dispatch_label, lp_info);
2279 sjlj_mark_call_sites (lp_info);
2280
2281 sjlj_emit_function_enter (dispatch_label);
2282 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2283 sjlj_emit_function_exit ();
2284 }
2285
2286 free (lp_info);
2287 }
2288
2289 void
2290 finish_eh_generation ()
2291 {
2292 /* Nothing to do if no regions created. */
2293 if (cfun->eh->region_tree == NULL)
2294 return;
2295
2296 /* The object here is to provide find_basic_blocks with detailed
2297 information (via reachable_handlers) on how exception control
2298 flows within the function. In this first pass, we can include
2299 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2300 regions, and hope that it will be useful in deleting unreachable
2301 handlers. Subsequently, we will generate landing pads which will
2302 connect many of the handlers, and then type information will not
2303 be effective. Still, this is a win over previous implementations. */
2304
2305 rebuild_jump_labels (get_insns ());
2306 find_basic_blocks (get_insns (), max_reg_num (), 0);
2307 cleanup_cfg (CLEANUP_PRE_LOOP);
2308
2309 /* These registers are used by the landing pads. Make sure they
2310 have been generated. */
2311 get_exception_pointer (cfun);
2312 get_exception_filter (cfun);
2313
2314 /* Construct the landing pads. */
2315
2316 assign_filter_values ();
2317 build_post_landing_pads ();
2318 connect_post_landing_pads ();
2319 if (USING_SJLJ_EXCEPTIONS)
2320 sjlj_build_landing_pads ();
2321 else
2322 dw2_build_landing_pads ();
2323
2324 cfun->eh->built_landing_pads = 1;
2325
2326 /* We've totally changed the CFG. Start over. */
2327 find_exception_handler_labels ();
2328 rebuild_jump_labels (get_insns ());
2329 find_basic_blocks (get_insns (), max_reg_num (), 0);
2330 cleanup_cfg (CLEANUP_PRE_LOOP);
2331 }
2332 \f
2333 /* This section handles removing dead code for flow. */
2334
2335 /* Remove LABEL from the exception_handler_labels list. */
2336
2337 static void
2338 remove_exception_handler_label (label)
2339 rtx label;
2340 {
2341 rtx *pl, l;
2342
2343 for (pl = &exception_handler_labels, l = *pl;
2344 XEXP (l, 0) != label;
2345 pl = &XEXP (l, 1), l = *pl)
2346 continue;
2347
2348 *pl = XEXP (l, 1);
2349 free_EXPR_LIST_node (l);
2350 }
2351
2352 /* Splice REGION from the region tree etc. */
2353
2354 static void
2355 remove_eh_handler (region)
2356 struct eh_region *region;
2357 {
2358 struct eh_region **pp, *p;
2359 rtx lab;
2360 int i;
2361
2362 /* For the benefit of efficiently handling REG_EH_REGION notes,
2363 replace this region in the region array with its containing
2364 region. Note that previous region deletions may result in
2365 multiple copies of this region in the array, so we have to
2366 search the whole thing. */
2367 for (i = cfun->eh->last_region_number; i > 0; --i)
2368 if (cfun->eh->region_array[i] == region)
2369 cfun->eh->region_array[i] = region->outer;
2370
2371 if (cfun->eh->built_landing_pads)
2372 lab = region->landing_pad;
2373 else
2374 lab = region->label;
2375 if (lab)
2376 remove_exception_handler_label (lab);
2377
2378 if (region->outer)
2379 pp = &region->outer->inner;
2380 else
2381 pp = &cfun->eh->region_tree;
2382 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2383 continue;
2384
2385 if (region->inner)
2386 {
2387 for (p = region->inner; p->next_peer ; p = p->next_peer)
2388 p->outer = region->outer;
2389 p->next_peer = region->next_peer;
2390 p->outer = region->outer;
2391 *pp = region->inner;
2392 }
2393 else
2394 *pp = region->next_peer;
2395
2396 if (region->type == ERT_CATCH)
2397 {
2398 struct eh_region *try, *next, *prev;
2399
2400 for (try = region->next_peer;
2401 try->type == ERT_CATCH;
2402 try = try->next_peer)
2403 continue;
2404 if (try->type != ERT_TRY)
2405 abort ();
2406
2407 next = region->u.catch.next_catch;
2408 prev = region->u.catch.prev_catch;
2409
2410 if (next)
2411 next->u.catch.prev_catch = prev;
2412 else
2413 try->u.try.last_catch = prev;
2414 if (prev)
2415 prev->u.catch.next_catch = next;
2416 else
2417 {
2418 try->u.try.catch = next;
2419 if (! next)
2420 remove_eh_handler (try);
2421 }
2422 }
2423
2424 free (region);
2425 }
2426
2427 /* LABEL heads a basic block that is about to be deleted. If this
2428 label corresponds to an exception region, we may be able to
2429 delete the region. */
2430
2431 void
2432 maybe_remove_eh_handler (label)
2433 rtx label;
2434 {
2435 int i;
2436
2437 /* ??? After generating landing pads, it's not so simple to determine
2438 if the region data is completely unused. One must examine the
2439 landing pad and the post-landing pad, and whether an inner try block
2440 is referencing the catch handlers directly. */
2441 if (cfun->eh->built_landing_pads)
2442 return;
2443
2444 for (i = cfun->eh->last_region_number; i > 0; --i)
2445 {
2446 struct eh_region *region = cfun->eh->region_array[i];
2447 if (region && region->label == label)
2448 {
2449 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2450 because there is no path to the fallback call to terminate.
2451 But the region continues to affect call-site data until there
2452 are no more contained calls, which we don't see here. */
2453 if (region->type == ERT_MUST_NOT_THROW)
2454 {
2455 remove_exception_handler_label (region->label);
2456 region->label = NULL_RTX;
2457 }
2458 else
2459 remove_eh_handler (region);
2460 break;
2461 }
2462 }
2463 }
2464
2465 \f
2466 /* This section describes CFG exception edges for flow. */
2467
2468 /* For communicating between calls to reachable_next_level. */
2469 struct reachable_info
2470 {
2471 tree types_caught;
2472 tree types_allowed;
2473 rtx handlers;
2474 };
2475
2476 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2477 base class of TYPE, is in HANDLED. */
2478
2479 static int
2480 check_handled (handled, type)
2481 tree handled, type;
2482 {
2483 tree t;
2484
2485 /* We can check for exact matches without front-end help. */
2486 if (! lang_eh_type_covers)
2487 {
2488 for (t = handled; t ; t = TREE_CHAIN (t))
2489 if (TREE_VALUE (t) == type)
2490 return 1;
2491 }
2492 else
2493 {
2494 for (t = handled; t ; t = TREE_CHAIN (t))
2495 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2496 return 1;
2497 }
2498
2499 return 0;
2500 }
2501
2502 /* A subroutine of reachable_next_level. If we are collecting a list
2503 of handlers, add one. After landing pad generation, reference
2504 the landing pad instead of the handlers themselves. Further, the handlers are
2505 all wired together, so by referencing one, we've got them all.
2506 Before landing pad generation we reference each handler individually.
2507
2508 LP_REGION contains the landing pad; REGION is the handler. */
2509
2510 static void
2511 add_reachable_handler (info, lp_region, region)
2512 struct reachable_info *info;
2513 struct eh_region *lp_region;
2514 struct eh_region *region;
2515 {
2516 if (! info)
2517 return;
2518
2519 if (cfun->eh->built_landing_pads)
2520 {
2521 if (! info->handlers)
2522 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2523 }
2524 else
2525 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2526 }
2527
2528 /* Process one level of exception regions for reachability.
2529 If TYPE_THROWN is non-null, then it is the *exact* type being
2530 propagated. If INFO is non-null, then collect handler labels
2531 and caught/allowed type information between invocations. */
2532
2533 static enum reachable_code
2534 reachable_next_level (region, type_thrown, info)
2535 struct eh_region *region;
2536 tree type_thrown;
2537 struct reachable_info *info;
2538 {
2539 switch (region->type)
2540 {
2541 case ERT_CLEANUP:
2542 /* Before landing-pad generation, we model control flow
2543 directly to the individual handlers. In this way we can
2544 see that catch handler types may shadow one another. */
2545 add_reachable_handler (info, region, region);
2546 return RNL_MAYBE_CAUGHT;
2547
2548 case ERT_TRY:
2549 {
2550 struct eh_region *c;
2551 enum reachable_code ret = RNL_NOT_CAUGHT;
2552
2553 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2554 {
2555 /* A catch-all handler ends the search. */
2556 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2557 to be run as well. */
2558 if (c->u.catch.type == NULL)
2559 {
2560 add_reachable_handler (info, region, c);
2561 return RNL_CAUGHT;
2562 }
2563
2564 if (type_thrown)
2565 {
2566 /* If we have a type match, end the search. */
2567 if (c->u.catch.type == type_thrown
2568 || (lang_eh_type_covers
2569 && (*lang_eh_type_covers) (c->u.catch.type,
2570 type_thrown)))
2571 {
2572 add_reachable_handler (info, region, c);
2573 return RNL_CAUGHT;
2574 }
2575
2576 /* If we have definitive information of a match failure,
2577 the catch won't trigger. */
2578 if (lang_eh_type_covers)
2579 return RNL_NOT_CAUGHT;
2580 }
2581
2582 if (! info)
2583 ret = RNL_MAYBE_CAUGHT;
2584
2585 /* A type must not have been previously caught. */
2586 else if (! check_handled (info->types_caught, c->u.catch.type))
2587 {
2588 add_reachable_handler (info, region, c);
2589 info->types_caught = tree_cons (NULL, c->u.catch.type,
2590 info->types_caught);
2591
2592 /* ??? If the catch type is a base class of every allowed
2593 type, then we know we can stop the search. */
2594 ret = RNL_MAYBE_CAUGHT;
2595 }
2596 }
2597
2598 return ret;
2599 }
2600
2601 case ERT_ALLOWED_EXCEPTIONS:
2602 /* An empty list of types definitely ends the search. */
2603 if (region->u.allowed.type_list == NULL_TREE)
2604 {
2605 add_reachable_handler (info, region, region);
2606 return RNL_CAUGHT;
2607 }
2608
2609 /* Collect a list of lists of allowed types for use in detecting
2610 when a catch may be transformed into a catch-all. */
2611 if (info)
2612 info->types_allowed = tree_cons (NULL_TREE,
2613 region->u.allowed.type_list,
2614 info->types_allowed);
2615
2616 /* If we have definitive information about the type hierarchy,
2617 then we can tell if the thrown type will pass through the
2618 filter. */
2619 if (type_thrown && lang_eh_type_covers)
2620 {
2621 if (check_handled (region->u.allowed.type_list, type_thrown))
2622 return RNL_NOT_CAUGHT;
2623 else
2624 {
2625 add_reachable_handler (info, region, region);
2626 return RNL_CAUGHT;
2627 }
2628 }
2629
2630 add_reachable_handler (info, region, region);
2631 return RNL_MAYBE_CAUGHT;
2632
2633 case ERT_CATCH:
2634 /* Catch regions are handled by their controlling try region. */
2635 return RNL_NOT_CAUGHT;
2636
2637 case ERT_MUST_NOT_THROW:
2638 /* Here we end our search, since no exceptions may propagate.
2639 If we've touched down at some landing pad previously, then the
2640 explicit function call we generated may be used. Otherwise
2641 the call is made by the runtime. */
2642 if (info && info->handlers)
2643 {
2644 add_reachable_handler (info, region, region);
2645 return RNL_CAUGHT;
2646 }
2647 else
2648 return RNL_BLOCKED;
2649
2650 case ERT_THROW:
2651 case ERT_FIXUP:
2652 /* Shouldn't see these here. */
2653 break;
2654 }
2655
2656 abort ();
2657 }
2658
2659 /* Retrieve a list of labels of exception handlers which can be
2660 reached by a given insn. */
2661
2662 rtx
2663 reachable_handlers (insn)
2664 rtx insn;
2665 {
2666 struct reachable_info info;
2667 struct eh_region *region;
2668 tree type_thrown;
2669 int region_number;
2670
2671 if (GET_CODE (insn) == JUMP_INSN
2672 && GET_CODE (PATTERN (insn)) == RESX)
2673 region_number = XINT (PATTERN (insn), 0);
2674 else
2675 {
2676 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2677 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2678 return NULL;
2679 region_number = INTVAL (XEXP (note, 0));
2680 }
2681
2682 memset (&info, 0, sizeof (info));
2683
2684 region = cfun->eh->region_array[region_number];
2685
2686 type_thrown = NULL_TREE;
2687 if (GET_CODE (insn) == JUMP_INSN
2688 && GET_CODE (PATTERN (insn)) == RESX)
2689 {
2690 /* A RESX leaves a region instead of entering it. Thus the
2691 region itself may have been deleted out from under us. */
2692 if (region == NULL)
2693 return NULL;
2694 region = region->outer;
2695 }
2696 else if (region->type == ERT_THROW)
2697 {
2698 type_thrown = region->u.throw.type;
2699 region = region->outer;
2700 }
2701
2702 for (; region; region = region->outer)
2703 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2704 break;
2705
2706 return info.handlers;
2707 }
2708
2709 /* Determine if the given INSN can throw an exception that is caught
2710 within the function. */
2711
2712 bool
2713 can_throw_internal (insn)
2714 rtx insn;
2715 {
2716 struct eh_region *region;
2717 tree type_thrown;
2718 rtx note;
2719
2720 if (! INSN_P (insn))
2721 return false;
2722
2723 if (GET_CODE (insn) == INSN
2724 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2725 insn = XVECEXP (PATTERN (insn), 0, 0);
2726
2727 if (GET_CODE (insn) == CALL_INSN
2728 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2729 {
2730 int i;
2731 for (i = 0; i < 3; ++i)
2732 {
2733 rtx sub = XEXP (PATTERN (insn), i);
2734 for (; sub ; sub = NEXT_INSN (sub))
2735 if (can_throw_internal (sub))
2736 return true;
2737 }
2738 return false;
2739 }
2740
2741 /* Every insn that might throw has an EH_REGION note. */
2742 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2743 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2744 return false;
2745
2746 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2747
2748 type_thrown = NULL_TREE;
2749 if (region->type == ERT_THROW)
2750 {
2751 type_thrown = region->u.throw.type;
2752 region = region->outer;
2753 }
2754
2755 /* If this exception is ignored by each and every containing region,
2756 then control passes straight out. The runtime may handle some
2757 regions, which also do not require processing internally. */
2758 for (; region; region = region->outer)
2759 {
2760 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2761 if (how == RNL_BLOCKED)
2762 return false;
2763 if (how != RNL_NOT_CAUGHT)
2764 return true;
2765 }
2766
2767 return false;
2768 }
2769
2770 /* Determine if the given INSN can throw an exception that is
2771 visible outside the function. */
2772
2773 bool
2774 can_throw_external (insn)
2775 rtx insn;
2776 {
2777 struct eh_region *region;
2778 tree type_thrown;
2779 rtx note;
2780
2781 if (! INSN_P (insn))
2782 return false;
2783
2784 if (GET_CODE (insn) == INSN
2785 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2786 insn = XVECEXP (PATTERN (insn), 0, 0);
2787
2788 if (GET_CODE (insn) == CALL_INSN
2789 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2790 {
2791 int i;
2792 for (i = 0; i < 3; ++i)
2793 {
2794 rtx sub = XEXP (PATTERN (insn), i);
2795 for (; sub ; sub = NEXT_INSN (sub))
2796 if (can_throw_external (sub))
2797 return true;
2798 }
2799 return false;
2800 }
2801
2802 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2803 if (!note)
2804 {
2805 /* Calls (and trapping insns) without notes are outside any
2806 exception handling region in this function. We have to
2807 assume it might throw. Given that the front end and middle
2808 ends mark known NOTHROW functions, this isn't so wildly
2809 inaccurate. */
2810 return (GET_CODE (insn) == CALL_INSN
2811 || (flag_non_call_exceptions
2812 && may_trap_p (PATTERN (insn))));
2813 }
2814 if (INTVAL (XEXP (note, 0)) <= 0)
2815 return false;
2816
2817 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2818
2819 type_thrown = NULL_TREE;
2820 if (region->type == ERT_THROW)
2821 {
2822 type_thrown = region->u.throw.type;
2823 region = region->outer;
2824 }
2825
2826 /* If the exception is caught or blocked by any containing region,
2827 then it is not seen by any calling function. */
2828 for (; region ; region = region->outer)
2829 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2830 return false;
2831
2832 return true;
2833 }
2834
2835 /* True if nothing in this function can throw outside this function. */
2836
2837 bool
2838 nothrow_function_p ()
2839 {
2840 rtx insn;
2841
2842 if (! flag_exceptions)
2843 return true;
2844
2845 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2846 if (can_throw_external (insn))
2847 return false;
2848 for (insn = current_function_epilogue_delay_list; insn;
2849 insn = XEXP (insn, 1))
2850 if (can_throw_external (insn))
2851 return false;
2852
2853 return true;
2854 }
2855
2856 \f
2857 /* Various hooks for unwind library. */
2858
2859 /* Do any necessary initialization to access arbitrary stack frames.
2860 On the SPARC, this means flushing the register windows. */
2861
2862 void
2863 expand_builtin_unwind_init ()
2864 {
2865 /* Set this so all the registers get saved in our frame; we need to be
2866 able to copy the saved values for any registers from frames we unwind. */
2867 current_function_has_nonlocal_label = 1;
2868
2869 #ifdef SETUP_FRAME_ADDRESSES
2870 SETUP_FRAME_ADDRESSES ();
2871 #endif
2872 }
2873
2874 rtx
2875 expand_builtin_eh_return_data_regno (arglist)
2876 tree arglist;
2877 {
2878 tree which = TREE_VALUE (arglist);
2879 unsigned HOST_WIDE_INT iwhich;
2880
2881 if (TREE_CODE (which) != INTEGER_CST)
2882 {
2883 error ("argument of `__builtin_eh_return_regno' must be constant");
2884 return constm1_rtx;
2885 }
2886
2887 iwhich = tree_low_cst (which, 1);
2888 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2889 if (iwhich == INVALID_REGNUM)
2890 return constm1_rtx;
2891
2892 #ifdef DWARF_FRAME_REGNUM
2893 iwhich = DWARF_FRAME_REGNUM (iwhich);
2894 #else
2895 iwhich = DBX_REGISTER_NUMBER (iwhich);
2896 #endif
2897
2898 return GEN_INT (iwhich);
2899 }
2900
2901 /* Given a value extracted from the return address register or stack slot,
2902 return the actual address encoded in that value. */
2903
2904 rtx
2905 expand_builtin_extract_return_addr (addr_tree)
2906 tree addr_tree;
2907 {
2908 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2909
2910 /* First mask out any unwanted bits. */
2911 #ifdef MASK_RETURN_ADDR
2912 expand_and (addr, MASK_RETURN_ADDR, addr);
2913 #endif
2914
2915 /* Then adjust to find the real return address. */
2916 #if defined (RETURN_ADDR_OFFSET)
2917 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2918 #endif
2919
2920 return addr;
2921 }
2922
2923 /* Given an actual address in addr_tree, do any necessary encoding
2924 and return the value to be stored in the return address register or
2925 stack slot so the epilogue will return to that address. */
2926
2927 rtx
2928 expand_builtin_frob_return_addr (addr_tree)
2929 tree addr_tree;
2930 {
2931 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2932
2933 #ifdef POINTERS_EXTEND_UNSIGNED
2934 addr = convert_memory_address (Pmode, addr);
2935 #endif
2936
2937 #ifdef RETURN_ADDR_OFFSET
2938 addr = force_reg (Pmode, addr);
2939 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2940 #endif
2941
2942 return addr;
2943 }
2944
2945 /* Set up the epilogue with the magic bits we'll need to return to the
2946 exception handler. */
2947
2948 void
2949 expand_builtin_eh_return (stackadj_tree, handler_tree)
2950 tree stackadj_tree, handler_tree;
2951 {
2952 rtx stackadj, handler;
2953
2954 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2955 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2956
2957 #ifdef POINTERS_EXTEND_UNSIGNED
2958 stackadj = convert_memory_address (Pmode, stackadj);
2959 handler = convert_memory_address (Pmode, handler);
2960 #endif
2961
2962 if (! cfun->eh->ehr_label)
2963 {
2964 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2965 cfun->eh->ehr_handler = copy_to_reg (handler);
2966 cfun->eh->ehr_label = gen_label_rtx ();
2967 }
2968 else
2969 {
2970 if (stackadj != cfun->eh->ehr_stackadj)
2971 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2972 if (handler != cfun->eh->ehr_handler)
2973 emit_move_insn (cfun->eh->ehr_handler, handler);
2974 }
2975
2976 emit_jump (cfun->eh->ehr_label);
2977 }
2978
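/* Expand the receiver for the __builtin_eh_return data saved above:
   on the normal path the stack adjustment is simply zeroed; on the
   eh-return path we load the saved adjustment and handler, either via
   the target's eh_return pattern or through EH_RETURN_STACKADJ_RTX
   and EH_RETURN_HANDLER_RTX directly.  */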
2979 void
2980 expand_eh_return ()
2981 {
2982 rtx sa, ra, around_label;
2983
2984 if (! cfun->eh->ehr_label)
2985 return;
2986
2987 sa = EH_RETURN_STACKADJ_RTX;
2988 if (! sa)
2989 {
2990 error ("__builtin_eh_return not supported on this target");
2991 return;
2992 }
2993
2994 current_function_calls_eh_return = 1;
2995
2996 around_label = gen_label_rtx ();
2997 emit_move_insn (sa, const0_rtx);
2998 emit_jump (around_label);
2999
3000 emit_label (cfun->eh->ehr_label);
3001 clobber_return_register ();
3002
3003 #ifdef HAVE_eh_return
3004 if (HAVE_eh_return)
3005 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3006 else
3007 #endif
3008 {
3009 ra = EH_RETURN_HANDLER_RTX;
3010 if (! ra)
3011 {
3012 error ("__builtin_eh_return not supported on this target");
3013 ra = gen_reg_rtx (Pmode);
3014 }
3015
3016 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3017 emit_move_insn (ra, cfun->eh->ehr_handler);
3018 }
3019
3020 emit_label (around_label);
3021 }
3022 \f
3023 struct action_record
3024 {
3025 int offset;
3026 int filter;
3027 int next;
3028 };
3029
3030 static int
3031 action_record_eq (pentry, pdata)
3032 const PTR pentry;
3033 const PTR pdata;
3034 {
3035 const struct action_record *entry = (const struct action_record *) pentry;
3036 const struct action_record *data = (const struct action_record *) pdata;
3037 return entry->filter == data->filter && entry->next == data->next;
3038 }
3039
3040 static hashval_t
3041 action_record_hash (pentry)
3042 const PTR pentry;
3043 {
3044 const struct action_record *entry = (const struct action_record *) pentry;
3045 return entry->next * 1009 + entry->filter;
3046 }
3047
3048 static int
3049 add_action_record (ar_hash, filter, next)
3050 htab_t ar_hash;
3051 int filter, next;
3052 {
3053 struct action_record **slot, *new, tmp;
3054
3055 tmp.filter = filter;
3056 tmp.next = next;
3057 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3058
3059 if ((new = *slot) == NULL)
3060 {
3061 new = (struct action_record *) xmalloc (sizeof (*new));
3062 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3063 new->filter = filter;
3064 new->next = next;
3065 *slot = new;
3066
3067 /* The filter value goes in untouched. The link to the next
3068 record is a "self-relative" byte offset, or zero to indicate
3069 that there is no next record. So convert the absolute 1-based
3070 indices we've been carrying around into a displacement. */
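/* A small worked example (not emitted verbatim): starting from an
   empty table, a record {filter 1, no next} lands at offset 1 and
   pushes the bytes 01 00.  A second record {filter 2, next at offset
   1} lands at offset 3 and pushes 02 7d; when its link byte is
   written at position 4, record 1 lies -3 bytes away, and
   sleb128 (-3) is 0x7d.  */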
3071
3072 push_sleb128 (&cfun->eh->action_record_data, filter);
3073 if (next)
3074 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3075 push_sleb128 (&cfun->eh->action_record_data, next);
3076 }
3077
3078 return new->offset;
3079 }
3080
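/* Compute the action record chain for REGION and everything
   containing it.  Returns the 1-based offset of the chain's first
   record in action_record_data; 0 when a landing pad is needed but
   carries no actions (cleanups only); -1 when neither landing pad
   nor actions are required; and -2 for must-not-throw, which wants
   an lsda but no landing pad.  */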
3081 static int
3082 collect_one_action_chain (ar_hash, region)
3083 htab_t ar_hash;
3084 struct eh_region *region;
3085 {
3086 struct eh_region *c;
3087 int next;
3088
3089 /* If we've reached the top of the region chain, then we have
3090 no actions, and require no landing pad. */
3091 if (region == NULL)
3092 return -1;
3093
3094 switch (region->type)
3095 {
3096 case ERT_CLEANUP:
3097 /* A cleanup adds a zero filter to the beginning of the chain, but
3098 there are special cases to look out for. If there are *only*
3099 cleanups along a path, then it compresses to a zero action.
3100 Further, if there are multiple cleanups along a path, we only
3101 need to represent one of them, as that is enough to trigger
3102 entry to the landing pad at runtime. */
3103 next = collect_one_action_chain (ar_hash, region->outer);
3104 if (next <= 0)
3105 return 0;
3106 for (c = region->outer; c ; c = c->outer)
3107 if (c->type == ERT_CLEANUP)
3108 return next;
3109 return add_action_record (ar_hash, 0, next);
3110
3111 case ERT_TRY:
3112 /* Process the associated catch regions in reverse order.
3113 If there's a catch-all handler, then we don't need to
3114 search outer regions. Use a magic -3 value to record
3115 that we haven't done the outer search. */
3116 next = -3;
3117 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3118 {
3119 if (c->u.catch.type == NULL)
3120 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3121 else
3122 {
3123 if (next == -3)
3124 {
3125 next = collect_one_action_chain (ar_hash, region->outer);
3126 if (next < 0)
3127 next = 0;
3128 }
3129 next = add_action_record (ar_hash, c->u.catch.filter, next);
3130 }
3131 }
3132 return next;
3133
3134 case ERT_ALLOWED_EXCEPTIONS:
3135 /* An exception specification adds its filter to the
3136 beginning of the chain. */
3137 next = collect_one_action_chain (ar_hash, region->outer);
3138 return add_action_record (ar_hash, region->u.allowed.filter,
3139 next < 0 ? 0 : next);
3140
3141 case ERT_MUST_NOT_THROW:
3142 /* A must-not-throw region with no inner handlers or cleanups
3143 requires no call-site entry. Note that this differs from
3144 the no handler or cleanup case in that we do require an lsda
3145 to be generated. Return a magic -2 value to record this. */
3146 return -2;
3147
3148 case ERT_CATCH:
3149 case ERT_THROW:
3150 /* CATCH regions are handled in TRY above. THROW regions are
3151 for optimization information only and produce no output. */
3152 return collect_one_action_chain (ar_hash, region->outer);
3153
3154 default:
3155 abort ();
3156 }
3157 }
3158
3159 static int
3160 add_call_site (landing_pad, action)
3161 rtx landing_pad;
3162 int action;
3163 {
3164 struct call_site_record *data = cfun->eh->call_site_data;
3165 int used = cfun->eh->call_site_data_used;
3166 int size = cfun->eh->call_site_data_size;
3167
3168 if (used >= size)
3169 {
3170 size = (size ? size * 2 : 64);
3171 data = (struct call_site_record *)
3172 xrealloc (data, sizeof (*data) * size);
3173 cfun->eh->call_site_data = data;
3174 cfun->eh->call_site_data_size = size;
3175 }
3176
3177 data[used].landing_pad = landing_pad;
3178 data[used].action = action;
3179
3180 cfun->eh->call_site_data_used = used + 1;
3181
3182 return used + call_site_base;
3183 }
3184
3185 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3186 The new note numbers will not refer to region numbers, but
3187 instead to call site entries. */
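/* For instance, a run of consecutive insns sharing call-site entry N
   ends up bracketed as

       NOTE_INSN_EH_REGION_BEG     with NOTE_EH_HANDLER = N
         ... the insns ...
       NOTE_INSN_EH_REGION_END     with NOTE_EH_HANDLER = N

   and runs with no action at all are only materialized once we know
   the function needs an lsda.  */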
3188
3189 void
3190 convert_to_eh_region_ranges ()
3191 {
3192 rtx insn, iter, note;
3193 htab_t ar_hash;
3194 int last_action = -3;
3195 rtx last_action_insn = NULL_RTX;
3196 rtx last_landing_pad = NULL_RTX;
3197 rtx first_no_action_insn = NULL_RTX;
3198 int call_site = 0;
3199
3200 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3201 return;
3202
3203 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3204
3205 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3206
3207 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3208 if (INSN_P (iter))
3209 {
3210 struct eh_region *region;
3211 int this_action;
3212 rtx this_landing_pad;
3213
3214 insn = iter;
3215 if (GET_CODE (insn) == INSN
3216 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3217 insn = XVECEXP (PATTERN (insn), 0, 0);
3218
3219 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3220 if (!note)
3221 {
3222 if (! (GET_CODE (insn) == CALL_INSN
3223 || (flag_non_call_exceptions
3224 && may_trap_p (PATTERN (insn)))))
3225 continue;
3226 this_action = -1;
3227 region = NULL;
3228 }
3229 else
3230 {
3231 if (INTVAL (XEXP (note, 0)) <= 0)
3232 continue;
3233 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3234 this_action = collect_one_action_chain (ar_hash, region);
3235 }
3236
3237 /* Existence of catch handlers, or must-not-throw regions
3238 implies that an lsda is needed (even if empty). */
3239 if (this_action != -1)
3240 cfun->uses_eh_lsda = 1;
3241
3242 /* Delay creation of region notes for no-action regions
3243 until we're sure that an lsda will be required. */
3244 else if (last_action == -3)
3245 {
3246 first_no_action_insn = iter;
3247 last_action = -1;
3248 }
3249
3250 /* Cleanups and handlers may share action chains but not
3251 landing pads. Collect the landing pad for this region. */
3252 if (this_action >= 0)
3253 {
3254 struct eh_region *o;
3255 for (o = region; ! o->landing_pad ; o = o->outer)
3256 continue;
3257 this_landing_pad = o->landing_pad;
3258 }
3259 else
3260 this_landing_pad = NULL_RTX;
3261
3262 /* Differing actions or landing pads implies a change in call-site
3263 info, which implies some EH_REGION note should be emitted. */
3264 if (last_action != this_action
3265 || last_landing_pad != this_landing_pad)
3266 {
3267 /* If we'd not seen a previous action (-3) or the previous
3268 action was must-not-throw (-2), then we do not need an
3269 end note. */
3270 if (last_action >= -1)
3271 {
3272 /* If we delayed the creation of the begin, do it now. */
3273 if (first_no_action_insn)
3274 {
3275 call_site = add_call_site (NULL_RTX, 0);
3276 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3277 first_no_action_insn);
3278 NOTE_EH_HANDLER (note) = call_site;
3279 first_no_action_insn = NULL_RTX;
3280 }
3281
3282 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3283 last_action_insn);
3284 NOTE_EH_HANDLER (note) = call_site;
3285 }
3286
3287 /* If the new action is must-not-throw, then no region notes
3288 are created. */
3289 if (this_action >= -1)
3290 {
3291 call_site = add_call_site (this_landing_pad,
3292 this_action < 0 ? 0 : this_action);
3293 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3294 NOTE_EH_HANDLER (note) = call_site;
3295 }
3296
3297 last_action = this_action;
3298 last_landing_pad = this_landing_pad;
3299 }
3300 last_action_insn = iter;
3301 }
3302
3303 if (last_action >= -1 && ! first_no_action_insn)
3304 {
3305 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3306 NOTE_EH_HANDLER (note) = call_site;
3307 }
3308
3309 htab_delete (ar_hash);
3310 }
3311
3312 \f
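/* Append VALUE to *DATA_AREA as an unsigned LEB128 number: seven
   bits per byte, least significant group first, high bit set on all
   but the final byte.  For example, 624485 (0x98765) comes out as
   the bytes 0xe5 0x8e 0x26.  */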
3313 static void
3314 push_uleb128 (data_area, value)
3315 varray_type *data_area;
3316 unsigned int value;
3317 {
3318 do
3319 {
3320 unsigned char byte = value & 0x7f;
3321 value >>= 7;
3322 if (value)
3323 byte |= 0x80;
3324 VARRAY_PUSH_UCHAR (*data_area, byte);
3325 }
3326 while (value);
3327 }
3328
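/* Likewise, but signed: emission stops once the remaining value is
   pure sign extension and the sign bit of the last byte emitted
   agrees with it.  For example, 2 comes out as 0x02 and -2 as
   0x7e.  */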
3329 static void
3330 push_sleb128 (data_area, value)
3331 varray_type *data_area;
3332 int value;
3333 {
3334 unsigned char byte;
3335 int more;
3336
3337 do
3338 {
3339 byte = value & 0x7f;
3340 value >>= 7;
3341 more = ! ((value == 0 && (byte & 0x40) == 0)
3342 || (value == -1 && (byte & 0x40) != 0));
3343 if (more)
3344 byte |= 0x80;
3345 VARRAY_PUSH_UCHAR (*data_area, byte);
3346 }
3347 while (more);
3348 }
3349
3350 \f
3351 #ifndef HAVE_AS_LEB128
3352 static int
3353 dw2_size_of_call_site_table ()
3354 {
3355 int n = cfun->eh->call_site_data_used;
3356 int size = n * (4 + 4 + 4);
3357 int i;
3358
3359 for (i = 0; i < n; ++i)
3360 {
3361 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3362 size += size_of_uleb128 (cs->action);
3363 }
3364
3365 return size;
3366 }
3367
3368 static int
3369 sjlj_size_of_call_site_table ()
3370 {
3371 int n = cfun->eh->call_site_data_used;
3372 int size = 0;
3373 int i;
3374
3375 for (i = 0; i < n; ++i)
3376 {
3377 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3378 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3379 size += size_of_uleb128 (cs->action);
3380 }
3381
3382 return size;
3383 }
3384 #endif
3385
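/* Output the dwarf2-style call-site table: one record per call site,
   giving the region start (relative to the function start), the
   region length, the landing pad (again function-relative, or 0 for
   none) and the action record offset.  */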
3386 static void
3387 dw2_output_call_site_table ()
3388 {
3389 const char *function_start_lab
3390 = IDENTIFIER_POINTER (current_function_func_begin_label);
3391 int n = cfun->eh->call_site_data_used;
3392 int i;
3393
3394 for (i = 0; i < n; ++i)
3395 {
3396 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3397 char reg_start_lab[32];
3398 char reg_end_lab[32];
3399 char landing_pad_lab[32];
3400
3401 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3402 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3403
3404 if (cs->landing_pad)
3405 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3406 CODE_LABEL_NUMBER (cs->landing_pad));
3407
3408 /* ??? Perhaps use insn length scaling if the assembler supports
3409 generic arithmetic. */
3410 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3411 data4 if the function is small enough. */
3412 #ifdef HAVE_AS_LEB128
3413 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3414 "region %d start", i);
3415 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3416 "length");
3417 if (cs->landing_pad)
3418 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3419 "landing pad");
3420 else
3421 dw2_asm_output_data_uleb128 (0, "landing pad");
3422 #else
3423 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3424 "region %d start", i);
3425 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3426 if (cs->landing_pad)
3427 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3428 "landing pad");
3429 else
3430 dw2_asm_output_data (4, 0, "landing pad");
3431 #endif
3432 dw2_asm_output_data_uleb128 (cs->action, "action");
3433 }
3434
3435 call_site_base += n;
3436 }
3437
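/* Output the sjlj-style call-site table: only the dispatch index and
   the action are needed, since the call-site value stored at runtime
   already identifies the region.  */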
3438 static void
3439 sjlj_output_call_site_table ()
3440 {
3441 int n = cfun->eh->call_site_data_used;
3442 int i;
3443
3444 for (i = 0; i < n; ++i)
3445 {
3446 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3447
3448 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3449 "region %d landing pad", i);
3450 dw2_asm_output_data_uleb128 (cs->action, "action");
3451 }
3452
3453 call_site_base += n;
3454 }
3455
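/* Output the language-specific data area for the current function.
   Schematically, the lsda emitted below is laid out as

       @LPStart format byte        (always DW_EH_PE_omit here)
       @TType format byte
       uleb128 offset to the @TType data, if there is any
       call-site format byte
       uleb128 call-site table length
       call-site table
       action record table
       @TType entries, suitably aligned, in reverse order
       exception specification table  */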
3456 void
3457 output_function_exception_table ()
3458 {
3459 int tt_format, cs_format, lp_format, i, n;
3460 #ifdef HAVE_AS_LEB128
3461 char ttype_label[32];
3462 char cs_after_size_label[32];
3463 char cs_end_label[32];
3464 #else
3465 int call_site_len;
3466 #endif
3467 int have_tt_data;
3468 int funcdef_number;
3469 int tt_format_size = 0;
3470
3471 /* Not all functions need anything. */
3472 if (! cfun->uses_eh_lsda)
3473 return;
3474
3475 funcdef_number = (USING_SJLJ_EXCEPTIONS
3476 ? sjlj_funcdef_number
3477 : current_funcdef_number);
3478
3479 #ifdef IA64_UNWIND_INFO
3480 fputs ("\t.personality\t", asm_out_file);
3481 output_addr_const (asm_out_file, eh_personality_libfunc);
3482 fputs ("\n\t.handlerdata\n", asm_out_file);
3483 /* Note that varasm still thinks we're in the function's code section.
3484 The ".endp" directive that will immediately follow will take us back. */
3485 #else
3486 exception_section ();
3487 #endif
3488
3489 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3490 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3491
3492 /* Indicate the format of the @TType entries. */
3493 if (! have_tt_data)
3494 tt_format = DW_EH_PE_omit;
3495 else
3496 {
3497 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3498 #ifdef HAVE_AS_LEB128
3499 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3500 #endif
3501 tt_format_size = size_of_encoded_value (tt_format);
3502
3503 assemble_align (tt_format_size * BITS_PER_UNIT);
3504 }
3505
3506 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3507
3508 /* The LSDA header. */
3509
3510 /* Indicate the format of the landing pad start pointer. An omitted
3511 field implies @LPStart == @Start. */
3512 /* Currently we always put @LPStart == @Start. This field would
3513 be most useful in moving the landing pads completely out of
3514 line to another section, but it could also be used to minimize
3515 the size of uleb128 landing pad offsets. */
3516 lp_format = DW_EH_PE_omit;
3517 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3518 eh_data_format_name (lp_format));
3519
3520 /* @LPStart pointer would go here. */
3521
3522 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3523 eh_data_format_name (tt_format));
3524
3525 #ifndef HAVE_AS_LEB128
3526 if (USING_SJLJ_EXCEPTIONS)
3527 call_site_len = sjlj_size_of_call_site_table ();
3528 else
3529 call_site_len = dw2_size_of_call_site_table ();
3530 #endif
3531
3532 /* A pc-relative 4-byte displacement to the @TType data. */
3533 if (have_tt_data)
3534 {
3535 #ifdef HAVE_AS_LEB128
3536 char ttype_after_disp_label[32];
3537 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3538 funcdef_number);
3539 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3540 "@TType base offset");
3541 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3542 #else
3543 /* Ug. Alignment complicates things. */
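/* The uleb128 encoding of the displacement can itself change size as
   the displacement grows, and the displacement must include the
   padding that aligns the @TType data; iterate until the value is
   self-consistent.  */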
3544 unsigned int before_disp, after_disp, last_disp, disp;
3545
3546 before_disp = 1 + 1;
3547 after_disp = (1 + size_of_uleb128 (call_site_len)
3548 + call_site_len
3549 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3550 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3551 * tt_format_size));
3552
3553 disp = after_disp;
3554 do
3555 {
3556 unsigned int disp_size, pad;
3557
3558 last_disp = disp;
3559 disp_size = size_of_uleb128 (disp);
3560 pad = before_disp + disp_size + after_disp;
3561 if (pad % tt_format_size)
3562 pad = tt_format_size - (pad % tt_format_size);
3563 else
3564 pad = 0;
3565 disp = after_disp + pad;
3566 }
3567 while (disp != last_disp);
3568
3569 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3570 #endif
3571 }
3572
3573 /* Indicate the format of the call-site offsets. */
3574 #ifdef HAVE_AS_LEB128
3575 cs_format = DW_EH_PE_uleb128;
3576 #else
3577 cs_format = DW_EH_PE_udata4;
3578 #endif
3579 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3580 eh_data_format_name (cs_format));
3581
3582 #ifdef HAVE_AS_LEB128
3583 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3584 funcdef_number);
3585 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3586 funcdef_number);
3587 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3588 "Call-site table length");
3589 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3590 if (USING_SJLJ_EXCEPTIONS)
3591 sjlj_output_call_site_table ();
3592 else
3593 dw2_output_call_site_table ();
3594 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3595 #else
3596 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3597 if (USING_SJLJ_EXCEPTIONS)
3598 sjlj_output_call_site_table ();
3599 else
3600 dw2_output_call_site_table ();
3601 #endif
3602
3603 /* ??? Decode and interpret the data for flag_debug_asm. */
3604 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3605 for (i = 0; i < n; ++i)
3606 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3607 (i ? NULL : "Action record table"));
3608
3609 if (have_tt_data)
3610 assemble_align (tt_format_size * BITS_PER_UNIT);
3611
3612 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3613 while (i-- > 0)
3614 {
3615 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3616
3617 if (type == NULL_TREE)
3618 type = integer_zero_node;
3619 else
3620 type = lookup_type_for_runtime (type);
3621
3622 dw2_asm_output_encoded_addr_rtx (tt_format,
3623 expand_expr (type, NULL_RTX, VOIDmode,
3624 EXPAND_INITIALIZER),
3625 NULL);
3626 }
3627
3628 #ifdef HAVE_AS_LEB128
3629 if (have_tt_data)
3630 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3631 #endif
3632
3633 /* ??? Decode and interpret the data for flag_debug_asm. */
3634 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3635 for (i = 0; i < n; ++i)
3636 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3637 (i ? NULL : "Exception specification table"));
3638
3639 function_section (current_function_decl);
3640
3641 if (USING_SJLJ_EXCEPTIONS)
3642 sjlj_funcdef_number += 1;
3643 }