1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 [ Add updated documentation on how to use this. ] */
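
/* As a sketch (not verified against every front end), the entry
   points below are driven in pairs. For a C++-style

	try { BODY } catch (T) { HANDLER }

   a front end would do roughly:

	expand_eh_region_start ();
	  ... expand BODY ...
	expand_start_all_catch ();	(ends the try region)
	expand_start_catch (T);
	  ... expand HANDLER ...
	expand_end_catch ();
	expand_end_all_catch ();	(control resumes here afterward)

   and for a cleanup such as a destructor, simply:

	expand_eh_region_start ();
	  ... expand the protected code ...
	expand_eh_region_end_cleanup (CLEANUP_EXPR);  */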
48
49
50 #include "config.h"
51 #include "system.h"
52 #include "rtl.h"
53 #include "tree.h"
54 #include "flags.h"
55 #include "function.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "insn-config.h"
59 #include "except.h"
60 #include "integrate.h"
61 #include "hard-reg-set.h"
62 #include "basic-block.h"
63 #include "output.h"
64 #include "dwarf2asm.h"
65 #include "dwarf2out.h"
66 #include "dwarf2.h"
67 #include "toplev.h"
68 #include "hashtab.h"
69 #include "intl.h"
70 #include "ggc.h"
71 #include "tm_p.h"
72 #include "target.h"
73
74 /* Provide defaults for stuff that may not be defined when using
75 sjlj exceptions. */
76 #ifndef EH_RETURN_STACKADJ_RTX
77 #define EH_RETURN_STACKADJ_RTX 0
78 #endif
79 #ifndef EH_RETURN_HANDLER_RTX
80 #define EH_RETURN_HANDLER_RTX 0
81 #endif
82 #ifndef EH_RETURN_DATA_REGNO
83 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
84 #endif
85
86
87 /* Nonzero means enable synchronous exceptions for non-call instructions. */
88 int flag_non_call_exceptions;
89
90 /* Protect cleanup actions with must-not-throw regions, with a call
91 to the given failure handler. */
92 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93
94 /* Return true if type A catches type B. */
95 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96
97 /* Map a type to a runtime object to match type. */
98 tree (*lang_eh_runtime_type) PARAMS ((tree));
99
100 /* A list of labels used for exception handlers. */
101 rtx exception_handler_labels;
102
103 static int call_site_base;
104 static unsigned int sjlj_funcdef_number;
105 static htab_t type_to_runtime_map;
106
107 /* Describe the SjLj_Function_Context structure. */
108 static tree sjlj_fc_type_node;
109 static int sjlj_fc_call_site_ofs;
110 static int sjlj_fc_data_ofs;
111 static int sjlj_fc_personality_ofs;
112 static int sjlj_fc_lsda_ofs;
113 static int sjlj_fc_jbuf_ofs;
114 \f
115 /* Describes one exception region. */
116 struct eh_region
117 {
118 /* The immediately surrounding region. */
119 struct eh_region *outer;
120
121 /* The list of immediately contained regions. */
122 struct eh_region *inner;
123 struct eh_region *next_peer;
124
125 /* An identifier for this region. */
126 int region_number;
127
128 /* Each region does exactly one thing. */
129 enum eh_region_type
130 {
131 ERT_UNKNOWN = 0,
132 ERT_CLEANUP,
133 ERT_TRY,
134 ERT_CATCH,
135 ERT_ALLOWED_EXCEPTIONS,
136 ERT_MUST_NOT_THROW,
137 ERT_THROW,
138 ERT_FIXUP
139 } type;
140
141 /* Holds the action to perform based on the preceding type. */
142 union {
143 /* A list of catch blocks, a surrounding try block,
144 and the label for continuing after a catch. */
145 struct {
146 struct eh_region *catch;
147 struct eh_region *last_catch;
148 struct eh_region *prev_try;
149 rtx continue_label;
150 } try;
151
152 /* The list through the catch handlers, the list of type objects
153 matched, and the list of associated filters. */
154 struct {
155 struct eh_region *next_catch;
156 struct eh_region *prev_catch;
157 tree type_list;
158 tree filter_list;
159 } catch;
160
161 /* A tree_list of allowed types. */
162 struct {
163 tree type_list;
164 int filter;
165 } allowed;
166
167 /* The type given by a call to "throw foo();", or discovered
168 for a throw. */
169 struct {
170 tree type;
171 } throw;
172
173 /* Retain the cleanup expression even after expansion so that
174 we can match up fixup regions. */
175 struct {
176 tree exp;
177 } cleanup;
178
179 /* The real region (by expression and by pointer) that fixup code
180 should live in. */
181 struct {
182 tree cleanup_exp;
183 struct eh_region *real_region;
184 } fixup;
185 } u;
186
187 /* Entry point for this region's handler before landing pads are built. */
188 rtx label;
189
190 /* Entry point for this region's handler from the runtime eh library. */
191 rtx landing_pad;
192
193 /* Entry point for this region's handler from an inner region. */
194 rtx post_landing_pad;
195
196 /* The RESX insn for handing off control to the next outermost handler,
197 if appropriate. */
198 rtx resume;
199 };
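
/* The tree is linked through OUTER/INNER/NEXT_PEER; a complete
   non-recursive walk over it (the same pattern used by
   mark_eh_status and collect_eh_region_array below, where "visit"
   stands for whatever per-region processing is wanted) looks like:

	r = region_tree;
	while (1)
	  {
	    visit (r);
	    if (r->inner)
	      r = r->inner;
	    else if (r->next_peer)
	      r = r->next_peer;
	    else
	      {
		do {
		  r = r->outer;
		  if (r == NULL)
		    goto done;
		} while (r->next_peer == NULL);
		r = r->next_peer;
	      }
	  }  */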
200
201 /* Used to save exception status for each function. */
202 struct eh_status
203 {
204 /* The tree of all regions for this function. */
205 struct eh_region *region_tree;
206
207 /* The same information as an indexable array. */
208 struct eh_region **region_array;
209
210 /* The most recently opened region. */
211 struct eh_region *cur_region;
212
213 /* This is the region for which we are processing catch blocks. */
214 struct eh_region *try_region;
215
216 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
217 node is itself a TREE_CHAINed list of handlers for regions that
218 are not yet closed. The TREE_VALUE of each entry contains the
219 handler for the corresponding entry on the ehstack. */
220 tree protect_list;
221
222 rtx filter;
223 rtx exc_ptr;
224
225 int built_landing_pads;
226 int last_region_number;
227
228 varray_type ttype_data;
229 varray_type ehspec_data;
230 varray_type action_record_data;
231
232 struct call_site_record
233 {
234 rtx landing_pad;
235 int action;
236 } *call_site_data;
237 int call_site_data_used;
238 int call_site_data_size;
239
240 rtx ehr_stackadj;
241 rtx ehr_handler;
242 rtx ehr_label;
243
244 rtx sjlj_fc;
245 rtx sjlj_exit_after;
246 };
247
248 \f
249 static void mark_eh_region PARAMS ((struct eh_region *));
250
251 static int t2r_eq PARAMS ((const PTR,
252 const PTR));
253 static hashval_t t2r_hash PARAMS ((const PTR));
254 static int t2r_mark_1 PARAMS ((PTR *, PTR));
255 static void t2r_mark PARAMS ((PTR));
256 static void add_type_for_runtime PARAMS ((tree));
257 static tree lookup_type_for_runtime PARAMS ((tree));
258
259 static struct eh_region *expand_eh_region_end PARAMS ((void));
260
261 static rtx get_exception_filter PARAMS ((struct function *));
262
263 static void collect_eh_region_array PARAMS ((void));
264 static void resolve_fixup_regions PARAMS ((void));
265 static void remove_fixup_regions PARAMS ((void));
266 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
267
268 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
269 struct inline_remap *));
270 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
271 struct eh_region **));
272 static int ttypes_filter_eq PARAMS ((const PTR,
273 const PTR));
274 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
275 static int ehspec_filter_eq PARAMS ((const PTR,
276 const PTR));
277 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
278 static int add_ttypes_entry PARAMS ((htab_t, tree));
279 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
280 tree));
281 static void assign_filter_values PARAMS ((void));
282 static void build_post_landing_pads PARAMS ((void));
283 static void connect_post_landing_pads PARAMS ((void));
284 static void dw2_build_landing_pads PARAMS ((void));
285
286 struct sjlj_lp_info;
287 static bool sjlj_find_directly_reachable_regions
288 PARAMS ((struct sjlj_lp_info *));
289 static void sjlj_assign_call_site_values
290 PARAMS ((rtx, struct sjlj_lp_info *));
291 static void sjlj_mark_call_sites
292 PARAMS ((struct sjlj_lp_info *));
293 static void sjlj_emit_function_enter PARAMS ((rtx));
294 static void sjlj_emit_function_exit PARAMS ((void));
295 static void sjlj_emit_dispatch_table
296 PARAMS ((rtx, struct sjlj_lp_info *));
297 static void sjlj_build_landing_pads PARAMS ((void));
298
299 static void remove_exception_handler_label PARAMS ((rtx));
300 static void remove_eh_handler PARAMS ((struct eh_region *));
301
302 struct reachable_info;
303
304 /* The return value of reachable_next_level. */
305 enum reachable_code
306 {
307 /* The given exception is not processed by the given region. */
308 RNL_NOT_CAUGHT,
309 /* The given exception may need processing by the given region. */
310 RNL_MAYBE_CAUGHT,
311 /* The given exception is completely processed by the given region. */
312 RNL_CAUGHT,
313 /* The given exception is completely processed by the runtime. */
314 RNL_BLOCKED
315 };
316
317 static int check_handled PARAMS ((tree, tree));
318 static void add_reachable_handler
319 PARAMS ((struct reachable_info *, struct eh_region *,
320 struct eh_region *));
321 static enum reachable_code reachable_next_level
322 PARAMS ((struct eh_region *, tree, struct reachable_info *));
323
324 static int action_record_eq PARAMS ((const PTR,
325 const PTR));
326 static hashval_t action_record_hash PARAMS ((const PTR));
327 static int add_action_record PARAMS ((htab_t, int, int));
328 static int collect_one_action_chain PARAMS ((htab_t,
329 struct eh_region *));
330 static int add_call_site PARAMS ((rtx, int));
331
332 static void push_uleb128 PARAMS ((varray_type *,
333 unsigned int));
334 static void push_sleb128 PARAMS ((varray_type *, int));
335 #ifndef HAVE_AS_LEB128
336 static int dw2_size_of_call_site_table PARAMS ((void));
337 static int sjlj_size_of_call_site_table PARAMS ((void));
338 #endif
339 static void dw2_output_call_site_table PARAMS ((void));
340 static void sjlj_output_call_site_table PARAMS ((void));
341
342 \f
343 /* Routine to see if exception handling is turned on.
344 DO_WARN is non-zero if we want to inform the user that exception
345 handling is turned off.
346
347 This is used to ensure that -fexceptions has been specified if the
348 compiler tries to use any exception-specific functions. */
349
350 int
351 doing_eh (do_warn)
352 int do_warn;
353 {
354 if (! flag_exceptions)
355 {
356 static int warned = 0;
357 if (! warned && do_warn)
358 {
359 error ("exception handling disabled, use -fexceptions to enable");
360 warned = 1;
361 }
362 return 0;
363 }
364 return 1;
365 }
366
367 \f
368 void
369 init_eh ()
370 {
371 ggc_add_rtx_root (&exception_handler_labels, 1);
372
373 if (! flag_exceptions)
374 return;
375
376 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
377 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
378
379 /* Create the SjLj_Function_Context structure. This should match
380 the definition in unwind-sjlj.c. */
381 if (USING_SJLJ_EXCEPTIONS)
382 {
383 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
384
385 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
386 ggc_add_tree_root (&sjlj_fc_type_node, 1);
387
388 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
389 build_pointer_type (sjlj_fc_type_node));
390 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
391
392 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
393 integer_type_node);
394 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
395
396 tmp = build_index_type (build_int_2 (4 - 1, 0));
397 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
398 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
399 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
400
401 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
402 ptr_type_node);
403 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
404
405 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
406 ptr_type_node);
407 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
408
409 #ifdef DONT_USE_BUILTIN_SETJMP
410 #ifdef JMP_BUF_SIZE
411 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
412 #else
413 /* Should be large enough for most systems; if it is not,
414 JMP_BUF_SIZE should be defined with the proper value. It will
415 also tend to be larger than necessary for most systems; a more
416 optimal port will define JMP_BUF_SIZE. */
417 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
418 #endif
419 #else
420 /* This is 2 for builtin_setjmp, plus whatever the target requires
421 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
422 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
423 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
424 #endif
425 tmp = build_index_type (tmp);
426 tmp = build_array_type (ptr_type_node, tmp);
427 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
428 #ifdef DONT_USE_BUILTIN_SETJMP
429 /* We don't know what alignment requirements the runtime's
430 jmp_buf has. Overestimate. */
431 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
432 DECL_USER_ALIGN (f_jbuf) = 1;
433 #endif
434 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
435
436 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
437 TREE_CHAIN (f_prev) = f_cs;
438 TREE_CHAIN (f_cs) = f_data;
439 TREE_CHAIN (f_data) = f_per;
440 TREE_CHAIN (f_per) = f_lsda;
441 TREE_CHAIN (f_lsda) = f_jbuf;
442
443 layout_type (sjlj_fc_type_node);
444
445 /* Cache the interesting field offsets so that we have
446 easy access from rtl. */
447 sjlj_fc_call_site_ofs
448 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
449 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
450 sjlj_fc_data_ofs
451 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
452 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
453 sjlj_fc_personality_ofs
454 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
455 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
456 sjlj_fc_lsda_ofs
457 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
458 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
459 sjlj_fc_jbuf_ofs
460 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
461 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
462 }
463 }
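
/* For reference, the layout built above is intended to mirror the C
   definition in unwind-sjlj.c, which presumably reads along these
   lines (a sketch; the exact field types belong to the runtime):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  void *jbuf[];		(length computed as above)
	};  */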
464
465 void
466 init_eh_for_function ()
467 {
468 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 }
470
471 /* Mark EH for GC. */
472
473 static void
474 mark_eh_region (region)
475 struct eh_region *region;
476 {
477 if (! region)
478 return;
479
480 switch (region->type)
481 {
482 case ERT_UNKNOWN:
483 /* This can happen if a nested function is inside the body of a region
484 and we do a GC as part of processing it. */
485 break;
486 case ERT_CLEANUP:
487 ggc_mark_tree (region->u.cleanup.exp);
488 break;
489 case ERT_TRY:
490 ggc_mark_rtx (region->u.try.continue_label);
491 break;
492 case ERT_CATCH:
493 ggc_mark_tree (region->u.catch.type_list);
494 ggc_mark_tree (region->u.catch.filter_list);
495 break;
496 case ERT_ALLOWED_EXCEPTIONS:
497 ggc_mark_tree (region->u.allowed.type_list);
498 break;
499 case ERT_MUST_NOT_THROW:
500 break;
501 case ERT_THROW:
502 ggc_mark_tree (region->u.throw.type);
503 break;
504 case ERT_FIXUP:
505 ggc_mark_tree (region->u.fixup.cleanup_exp);
506 break;
507 default:
508 abort ();
509 }
510
511 ggc_mark_rtx (region->label);
512 ggc_mark_rtx (region->resume);
513 ggc_mark_rtx (region->landing_pad);
514 ggc_mark_rtx (region->post_landing_pad);
515 }
516
517 void
518 mark_eh_status (eh)
519 struct eh_status *eh;
520 {
521 int i;
522
523 if (eh == 0)
524 return;
525
526 /* If we've called collect_eh_region_array, use it. Otherwise walk
527 the tree non-recursively. */
528 if (eh->region_array)
529 {
530 for (i = eh->last_region_number; i > 0; --i)
531 {
532 struct eh_region *r = eh->region_array[i];
533 if (r && r->region_number == i)
534 mark_eh_region (r);
535 }
536 }
537 else if (eh->region_tree)
538 {
539 struct eh_region *r = eh->region_tree;
540 while (1)
541 {
542 mark_eh_region (r);
543 if (r->inner)
544 r = r->inner;
545 else if (r->next_peer)
546 r = r->next_peer;
547 else
548 {
549 do {
550 r = r->outer;
551 if (r == NULL)
552 goto tree_done;
553 } while (r->next_peer == NULL);
554 r = r->next_peer;
555 }
556 }
557 tree_done:;
558 }
559
560 ggc_mark_tree (eh->protect_list);
561 ggc_mark_rtx (eh->filter);
562 ggc_mark_rtx (eh->exc_ptr);
563 ggc_mark_tree_varray (eh->ttype_data);
564
565 if (eh->call_site_data)
566 {
567 for (i = eh->call_site_data_used - 1; i >= 0; --i)
568 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
569 }
570
571 ggc_mark_rtx (eh->ehr_stackadj);
572 ggc_mark_rtx (eh->ehr_handler);
573 ggc_mark_rtx (eh->ehr_label);
574
575 ggc_mark_rtx (eh->sjlj_fc);
576 ggc_mark_rtx (eh->sjlj_exit_after);
577 }
578
579 void
580 free_eh_status (f)
581 struct function *f;
582 {
583 struct eh_status *eh = f->eh;
584
585 if (eh->region_array)
586 {
587 int i;
588 for (i = eh->last_region_number; i > 0; --i)
589 {
590 struct eh_region *r = eh->region_array[i];
591 /* Mind we don't free a region struct more than once. */
592 if (r && r->region_number == i)
593 free (r);
594 }
595 free (eh->region_array);
596 }
597 else if (eh->region_tree)
598 {
599 struct eh_region *next, *r = eh->region_tree;
600 while (1)
601 {
602 if (r->inner)
603 r = r->inner;
604 else if (r->next_peer)
605 {
606 next = r->next_peer;
607 free (r);
608 r = next;
609 }
610 else
611 {
612 do {
613 next = r->outer;
614 free (r);
615 r = next;
616 if (r == NULL)
617 goto tree_done;
618 } while (r->next_peer == NULL);
619 next = r->next_peer;
620 free (r);
621 r = next;
622 }
623 }
624 tree_done:;
625 }
626
627 VARRAY_FREE (eh->ttype_data);
628 VARRAY_FREE (eh->ehspec_data);
629 VARRAY_FREE (eh->action_record_data);
630 if (eh->call_site_data)
631 free (eh->call_site_data);
632
633 free (eh);
634 f->eh = NULL;
635 }
636
637 \f
638 /* Start an exception handling region. All instructions emitted
639 after this point are considered to be part of the region until
640 expand_eh_region_end is invoked. */
641
642 void
643 expand_eh_region_start ()
644 {
645 struct eh_region *new_region;
646 struct eh_region *cur_region;
647 rtx note;
648
649 if (! doing_eh (0))
650 return;
651
652 /* Insert a new blank region as a leaf in the tree. */
653 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
654 cur_region = cfun->eh->cur_region;
655 new_region->outer = cur_region;
656 if (cur_region)
657 {
658 new_region->next_peer = cur_region->inner;
659 cur_region->inner = new_region;
660 }
661 else
662 {
663 new_region->next_peer = cfun->eh->region_tree;
664 cfun->eh->region_tree = new_region;
665 }
666 cfun->eh->cur_region = new_region;
667
668 /* Create a note marking the start of this region. */
669 new_region->region_number = ++cfun->eh->last_region_number;
670 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
671 NOTE_EH_HANDLER (note) = new_region->region_number;
672 }
673
674 /* Common code to end a region. Returns the region just ended. */
675
676 static struct eh_region *
677 expand_eh_region_end ()
678 {
679 struct eh_region *cur_region = cfun->eh->cur_region;
680 rtx note;
681
682 /* Create a note marking the end of this region. */
683 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
684 NOTE_EH_HANDLER (note) = cur_region->region_number;
685
686 /* Pop. */
687 cfun->eh->cur_region = cur_region->outer;
688
689 return cur_region;
690 }
691
692 /* End an exception handling region for a cleanup. HANDLER is an
693 expression to expand for the cleanup. */
694
695 void
696 expand_eh_region_end_cleanup (handler)
697 tree handler;
698 {
699 struct eh_region *region;
700 tree protect_cleanup_actions;
701 rtx around_label;
702 rtx data_save[2];
703
704 if (! doing_eh (0))
705 return;
706
707 region = expand_eh_region_end ();
708 region->type = ERT_CLEANUP;
709 region->label = gen_label_rtx ();
710 region->u.cleanup.exp = handler;
711
712 around_label = gen_label_rtx ();
713 emit_jump (around_label);
714
715 emit_label (region->label);
716
717 /* Give the language a chance to specify an action to be taken if an
718 exception is thrown that would propagate out of the HANDLER. */
719 protect_cleanup_actions
720 = (lang_protect_cleanup_actions
721 ? (*lang_protect_cleanup_actions) ()
722 : NULL_TREE);
723
724 if (protect_cleanup_actions)
725 expand_eh_region_start ();
726
727 /* In case this cleanup involves an inline destructor with a try block in
728 it, we need to save the EH return data registers around it. */
729 data_save[0] = gen_reg_rtx (Pmode);
730 emit_move_insn (data_save[0], get_exception_pointer (cfun));
731 data_save[1] = gen_reg_rtx (word_mode);
732 emit_move_insn (data_save[1], get_exception_filter (cfun));
733
734 expand_expr (handler, const0_rtx, VOIDmode, 0);
735
736 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
737 emit_move_insn (cfun->eh->filter, data_save[1]);
738
739 if (protect_cleanup_actions)
740 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
741
742 /* We need any stack adjustment complete before the around_label. */
743 do_pending_stack_adjust ();
744
745 /* We delay the generation of the _Unwind_Resume until we generate
746 landing pads. We emit a marker here so as to get good control
747 flow data in the meantime. */
748 region->resume
749 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
750 emit_barrier ();
751
752 emit_label (around_label);
753 }
754
755 /* End an exception handling region for a try block, and prepare
756 for subsequent calls to expand_start_catch. */
757
758 void
759 expand_start_all_catch ()
760 {
761 struct eh_region *region;
762
763 if (! doing_eh (1))
764 return;
765
766 region = expand_eh_region_end ();
767 region->type = ERT_TRY;
768 region->u.try.prev_try = cfun->eh->try_region;
769 region->u.try.continue_label = gen_label_rtx ();
770
771 cfun->eh->try_region = region;
772
773 emit_jump (region->u.try.continue_label);
774 }
775
776 /* Begin a catch clause. TYPE_OR_LIST is the type caught, a list of such
777 types, or null if this is a catch-all clause. Providing a type list
778 allows the catch region to be associated with potentially several
779 exception types, which is useful e.g. for Ada. */
780
781 void
782 expand_start_catch (type_or_list)
783 tree type_or_list;
784 {
785 struct eh_region *t, *c, *l;
786 tree type_list;
787
788 if (! doing_eh (0))
789 return;
790
791 type_list = type_or_list;
792
793 if (type_or_list)
794 {
795 /* Ensure we always end up with a type list, to normalize further
796 processing, then register each type against the runtime types
797 map. */
798 tree type_node;
799
800 if (TREE_CODE (type_or_list) != TREE_LIST)
801 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
802
803 type_node = type_list;
804 for (; type_node; type_node = TREE_CHAIN (type_node))
805 add_type_for_runtime (TREE_VALUE (type_node));
806 }
807
808 expand_eh_region_start ();
809
810 t = cfun->eh->try_region;
811 c = cfun->eh->cur_region;
812 c->type = ERT_CATCH;
813 c->u.catch.type_list = type_list;
814 c->label = gen_label_rtx ();
815
816 l = t->u.try.last_catch;
817 c->u.catch.prev_catch = l;
818 if (l)
819 l->u.catch.next_catch = c;
820 else
821 t->u.try.catch = c;
822 t->u.try.last_catch = c;
823
824 emit_label (c->label);
825 }
826
827 /* End a catch clause. Control will resume after the try/catch block. */
828
829 void
830 expand_end_catch ()
831 {
832 struct eh_region *try_region, *catch_region;
833
834 if (! doing_eh (0))
835 return;
836
837 catch_region = expand_eh_region_end ();
838 try_region = cfun->eh->try_region;
839
840 emit_jump (try_region->u.try.continue_label);
841 }
842
843 /* End a sequence of catch handlers for a try block. */
844
845 void
846 expand_end_all_catch ()
847 {
848 struct eh_region *try_region;
849
850 if (! doing_eh (0))
851 return;
852
853 try_region = cfun->eh->try_region;
854 cfun->eh->try_region = try_region->u.try.prev_try;
855
856 emit_label (try_region->u.try.continue_label);
857 }
858
859 /* End an exception region for an exception type filter. ALLOWED is a
860 TREE_LIST of types to be matched by the runtime. FAILURE is an
861 expression to invoke if a mismatch occurs.
862
863 ??? We could use these semantics for calls to rethrow, too; if we can
864 see the surrounding catch clause, we know that the exception we're
865 rethrowing satisfies the "filter" of the catch type. */
866
867 void
868 expand_eh_region_end_allowed (allowed, failure)
869 tree allowed, failure;
870 {
871 struct eh_region *region;
872 rtx around_label;
873
874 if (! doing_eh (0))
875 return;
876
877 region = expand_eh_region_end ();
878 region->type = ERT_ALLOWED_EXCEPTIONS;
879 region->u.allowed.type_list = allowed;
880 region->label = gen_label_rtx ();
881
882 for (; allowed ; allowed = TREE_CHAIN (allowed))
883 add_type_for_runtime (TREE_VALUE (allowed));
884
885 /* We must emit the call to FAILURE here, so that if this function
886 throws a different exception, it will be processed by the
887 correct region. */
888
889 around_label = gen_label_rtx ();
890 emit_jump (around_label);
891
892 emit_label (region->label);
893 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
894 /* We must adjust the stack before we reach the AROUND_LABEL because
895 the call to FAILURE does not occur on all paths to the
896 AROUND_LABEL. */
897 do_pending_stack_adjust ();
898
899 emit_label (around_label);
900 }
901
902 /* End an exception region for a must-not-throw filter. FAILURE is an
903 expression to invoke if an uncaught exception propagates this far.
904
905 This is conceptually identical to expand_eh_region_end_allowed with
906 an empty allowed list (if you passed "std::terminate" instead of
907 "__cxa_call_unexpected"), but they are represented differently in
908 the C++ LSDA. */
909
910 void
911 expand_eh_region_end_must_not_throw (failure)
912 tree failure;
913 {
914 struct eh_region *region;
915 rtx around_label;
916
917 if (! doing_eh (0))
918 return;
919
920 region = expand_eh_region_end ();
921 region->type = ERT_MUST_NOT_THROW;
922 region->label = gen_label_rtx ();
923
924 /* We must emit the call to FAILURE here, so that if this function
925 throws a different exception, it will be processed by the
926 correct region. */
927
928 around_label = gen_label_rtx ();
929 emit_jump (around_label);
930
931 emit_label (region->label);
932 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
933
934 emit_label (around_label);
935 }
936
937 /* End an exception region for a throw. No handling goes on here,
938 but it's the easiest way for the front-end to indicate what type
939 is being thrown. */
940
941 void
942 expand_eh_region_end_throw (type)
943 tree type;
944 {
945 struct eh_region *region;
946
947 if (! doing_eh (0))
948 return;
949
950 region = expand_eh_region_end ();
951 region->type = ERT_THROW;
952 region->u.throw.type = type;
953 }
954
955 /* End a fixup region. Within this region the cleanups for the immediately
956 enclosing region are _not_ run. This is used for goto cleanup to avoid
957 destroying an object twice.
958
959 This would be an extraordinarily simple prospect, were it not for the
960 fact that we don't actually know what the immediately enclosing region
961 is. This surprising fact is because expand_cleanups is currently
962 generating a sequence that it will insert somewhere else. We collect
963 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
964
965 void
966 expand_eh_region_end_fixup (handler)
967 tree handler;
968 {
969 struct eh_region *fixup;
970
971 if (! doing_eh (0))
972 return;
973
974 fixup = expand_eh_region_end ();
975 fixup->type = ERT_FIXUP;
976 fixup->u.fixup.cleanup_exp = handler;
977 }
978
979 /* Return an rtl expression for a pointer to the exception object
980 within a handler. */
981
982 rtx
983 get_exception_pointer (fun)
984 struct function *fun;
985 {
986 rtx exc_ptr = fun->eh->exc_ptr;
987 if (fun == cfun && ! exc_ptr)
988 {
989 exc_ptr = gen_reg_rtx (Pmode);
990 fun->eh->exc_ptr = exc_ptr;
991 }
992 return exc_ptr;
993 }
994
995 /* Return an rtl expression for the exception dispatch filter
996 within a handler. */
997
998 static rtx
999 get_exception_filter (fun)
1000 struct function *fun;
1001 {
1002 rtx filter = fun->eh->filter;
1003 if (fun == cfun && ! filter)
1004 {
1005 filter = gen_reg_rtx (word_mode);
1006 fun->eh->filter = filter;
1007 }
1008 return filter;
1009 }
1010 \f
1011 /* Begin a region that will contain entries created with
1012 add_partial_entry. */
1013
1014 void
1015 begin_protect_partials ()
1016 {
1017 /* Push room for a new list. */
1018 cfun->eh->protect_list
1019 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
1020 }
1021
1022 /* Start a new exception region for a region of code that has a
1023 cleanup action and push the HANDLER for the region onto
1024 protect_list. All of the regions created with add_partial_entry
1025 will be ended when end_protect_partials is invoked. */
1026
1027 void
1028 add_partial_entry (handler)
1029 tree handler;
1030 {
1031 expand_eh_region_start ();
1032
1033 /* ??? This comment was old before the most recent rewrite. We
1034 really ought to fix the callers at some point. */
1035 /* For backwards compatibility, we allow callers to omit calls to
1036 begin_protect_partials for the outermost region. So, we must
1037 explicitly do so here. */
1038 if (!cfun->eh->protect_list)
1039 begin_protect_partials ();
1040
1041 /* Add this entry to the front of the list. */
1042 TREE_VALUE (cfun->eh->protect_list)
1043 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1044 }
1045
1046 /* End all the pending exception regions on protect_list. */
1047
1048 void
1049 end_protect_partials ()
1050 {
1051 tree t;
1052
1053 /* ??? This comment was old before the most recent rewrite. We
1054 really ought to fix the callers at some point. */
1055 /* For backwards compatibility, we allow callers to omit the call to
1056 begin_protect_partials for the outermost region. So,
1057 PROTECT_LIST may be NULL. */
1058 if (!cfun->eh->protect_list)
1059 return;
1060
1061 /* Pop the topmost entry. */
1062 t = TREE_VALUE (cfun->eh->protect_list);
1063 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1064
1065 /* End all the exception regions. */
1066 for (; t; t = TREE_CHAIN (t))
1067 expand_eh_region_end_cleanup (TREE_VALUE (t));
1068 }
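
/* A sketch of the expected call pattern, for two objects whose
   cleanups are the (hypothetical) expressions D1 and D2:

	begin_protect_partials ();
	add_partial_entry (D1);		(opens a region protected by D1)
	add_partial_entry (D2);		(opens a nested region for D2)
	  ... code constructing the objects ...
	end_protect_partials ();	(ends both regions, D2 first)

   end_protect_partials walks the list front to back, so the cleanup
   added last (D2, the innermost region) is ended first, matching the
   region nesting established by add_partial_entry.  */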
1069
1070 \f
1071 /* This section is for the exception handling specific optimization pass. */
1072
1073 /* Build an array for random access into the exception region tree.
1074 It's just as simple to collect the regions this way as in
1075 expand_eh_region_start, but without having to realloc memory. */
1076
1077 static void
1078 collect_eh_region_array ()
1079 {
1080 struct eh_region **array, *i;
1081
1082 i = cfun->eh->region_tree;
1083 if (! i)
1084 return;
1085
1086 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1087 cfun->eh->region_array = array;
1088
1089 while (1)
1090 {
1091 array[i->region_number] = i;
1092
1093 /* If there are sub-regions, process them. */
1094 if (i->inner)
1095 i = i->inner;
1096 /* If there are peers, process them. */
1097 else if (i->next_peer)
1098 i = i->next_peer;
1099 /* Otherwise, step back up the tree to the next peer. */
1100 else
1101 {
1102 do {
1103 i = i->outer;
1104 if (i == NULL)
1105 return;
1106 } while (i->next_peer == NULL);
1107 i = i->next_peer;
1108 }
1109 }
1110 }
1111
1112 static void
1113 resolve_fixup_regions ()
1114 {
1115 int i, j, n = cfun->eh->last_region_number;
1116
1117 for (i = 1; i <= n; ++i)
1118 {
1119 struct eh_region *fixup = cfun->eh->region_array[i];
1120 struct eh_region *cleanup = 0;
1121
1122 if (! fixup || fixup->type != ERT_FIXUP)
1123 continue;
1124
1125 for (j = 1; j <= n; ++j)
1126 {
1127 cleanup = cfun->eh->region_array[j];
1128 if (cleanup->type == ERT_CLEANUP
1129 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1130 break;
1131 }
1132 if (j > n)
1133 abort ();
1134
1135 fixup->u.fixup.real_region = cleanup->outer;
1136 }
1137 }
1138
1139 /* Now that we've discovered what region actually encloses a fixup,
1140 we can shuffle pointers and remove them from the tree. */
1141
1142 static void
1143 remove_fixup_regions ()
1144 {
1145 int i;
1146 rtx insn, note;
1147 struct eh_region *fixup;
1148
1149 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1150 for instructions referencing fixup regions. This is only
1151 strictly necessary for fixup regions with no parent, but
1152 doesn't hurt to do it for all regions. */
1153 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1154 if (INSN_P (insn)
1155 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1156 && INTVAL (XEXP (note, 0)) > 0
1157 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1158 && fixup->type == ERT_FIXUP)
1159 {
1160 if (fixup->u.fixup.real_region)
1161 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
1162 else
1163 remove_note (insn, note);
1164 }
1165
1166 /* Remove the fixup regions from the tree. */
1167 for (i = cfun->eh->last_region_number; i > 0; --i)
1168 {
1169 fixup = cfun->eh->region_array[i];
1170 if (! fixup)
1171 continue;
1172
1173 /* Allow GC to maybe free some memory. */
1174 if (fixup->type == ERT_CLEANUP)
1175 fixup->u.cleanup.exp = NULL_TREE;
1176
1177 if (fixup->type != ERT_FIXUP)
1178 continue;
1179
1180 if (fixup->inner)
1181 {
1182 struct eh_region *parent, *p, **pp;
1183
1184 parent = fixup->u.fixup.real_region;
1185
1186 /* Fix up the children's parent pointers; find the end of
1187 the list. */
1188 for (p = fixup->inner; ; p = p->next_peer)
1189 {
1190 p->outer = parent;
1191 if (! p->next_peer)
1192 break;
1193 }
1194
1195 /* In the tree of cleanups, only outer-inner ordering matters.
1196 So link the children back in anywhere at the correct level. */
1197 if (parent)
1198 pp = &parent->inner;
1199 else
1200 pp = &cfun->eh->region_tree;
1201 p->next_peer = *pp;
1202 *pp = fixup->inner;
1203 fixup->inner = NULL;
1204 }
1205
1206 remove_eh_handler (fixup);
1207 }
1208 }
1209
1210 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1211 instruction in the region that can throw. */
1212
1213 static void
1214 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1215 rtx *pinsns;
1216 int *orig_sp;
1217 int cur;
1218 {
1219 int *sp = orig_sp;
1220 rtx insn, next;
1221
1222 for (insn = *pinsns; insn ; insn = next)
1223 {
1224 next = NEXT_INSN (insn);
1225 if (GET_CODE (insn) == NOTE)
1226 {
1227 int kind = NOTE_LINE_NUMBER (insn);
1228 if (kind == NOTE_INSN_EH_REGION_BEG
1229 || kind == NOTE_INSN_EH_REGION_END)
1230 {
1231 if (kind == NOTE_INSN_EH_REGION_BEG)
1232 {
1233 struct eh_region *r;
1234
1235 *sp++ = cur;
1236 cur = NOTE_EH_HANDLER (insn);
1237
1238 r = cfun->eh->region_array[cur];
1239 if (r->type == ERT_FIXUP)
1240 {
1241 r = r->u.fixup.real_region;
1242 cur = r ? r->region_number : 0;
1243 }
1244 else if (r->type == ERT_CATCH)
1245 {
1246 r = r->outer;
1247 cur = r ? r->region_number : 0;
1248 }
1249 }
1250 else
1251 cur = *--sp;
1252
1253 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1254 requires extra care to adjust sequence start. */
1255 if (insn == *pinsns)
1256 *pinsns = next;
1257 remove_insn (insn);
1258 continue;
1259 }
1260 }
1261 else if (INSN_P (insn))
1262 {
1263 if (cur > 0
1264 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1265 /* Calls can always potentially throw exceptions, unless
1266 they have a REG_EH_REGION note with a value of 0 or less.
1267 Which should be the only possible kind so far. */
1268 && (GET_CODE (insn) == CALL_INSN
1269 /* If we wanted exceptions for non-call insns, then
1270 any may_trap_p instruction could throw. */
1271 || (flag_non_call_exceptions
1272 && GET_CODE (PATTERN (insn)) != CLOBBER
1273 && GET_CODE (PATTERN (insn)) != USE
1274 && may_trap_p (PATTERN (insn)))))
1275 {
1276 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1277 REG_NOTES (insn));
1278 }
1279
1280 if (GET_CODE (insn) == CALL_INSN
1281 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1282 {
1283 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1284 sp, cur);
1285 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1286 sp, cur);
1287 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1288 sp, cur);
1289 }
1290 }
1291 }
1292
1293 if (sp != orig_sp)
1294 abort ();
1295 }
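
/* For example, an insn stream of

	NOTE_INSN_EH_REGION_BEG 2
	  call foo
	NOTE_INSN_EH_REGION_END 2

   is left as just "call foo" carrying a REG_EH_REGION note of 2;
   both notes are deleted. (As above, FIXUP and CATCH regions are
   translated to the region that actually encloses them.)  */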
1296
1297 void
1298 convert_from_eh_region_ranges ()
1299 {
1300 int *stack;
1301 rtx insns;
1302
1303 collect_eh_region_array ();
1304 resolve_fixup_regions ();
1305
1306 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1307 insns = get_insns ();
1308 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1309 free (stack);
1310
1311 remove_fixup_regions ();
1312 }
1313
1314 void
1315 find_exception_handler_labels ()
1316 {
1317 rtx list = NULL_RTX;
1318 int i;
1319
1320 free_EXPR_LIST_list (&exception_handler_labels);
1321
1322 if (cfun->eh->region_tree == NULL)
1323 return;
1324
1325 for (i = cfun->eh->last_region_number; i > 0; --i)
1326 {
1327 struct eh_region *region = cfun->eh->region_array[i];
1328 rtx lab;
1329
1330 if (! region)
1331 continue;
1332 if (cfun->eh->built_landing_pads)
1333 lab = region->landing_pad;
1334 else
1335 lab = region->label;
1336
1337 if (lab)
1338 list = alloc_EXPR_LIST (0, lab, list);
1339 }
1340
1341 /* For sjlj exceptions, we need the return label to remain live until
1342 after landing pad generation. */
1343 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1344 list = alloc_EXPR_LIST (0, return_label, list);
1345
1346 exception_handler_labels = list;
1347 }
1348
1349 \f
1350 static struct eh_region *
1351 duplicate_eh_region_1 (o, map)
1352 struct eh_region *o;
1353 struct inline_remap *map;
1354 {
1355 struct eh_region *n
1356 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1357
1358 n->region_number = o->region_number + cfun->eh->last_region_number;
1359 n->type = o->type;
1360
1361 switch (n->type)
1362 {
1363 case ERT_CLEANUP:
1364 case ERT_MUST_NOT_THROW:
1365 break;
1366
1367 case ERT_TRY:
1368 if (o->u.try.continue_label)
1369 n->u.try.continue_label
1370 = get_label_from_map (map,
1371 CODE_LABEL_NUMBER (o->u.try.continue_label));
1372 break;
1373
1374 case ERT_CATCH:
1375 n->u.catch.type_list = o->u.catch.type_list;
1376 break;
1377
1378 case ERT_ALLOWED_EXCEPTIONS:
1379 n->u.allowed.type_list = o->u.allowed.type_list;
1380 break;
1381
1382 case ERT_THROW:
1383 n->u.throw.type = o->u.throw.type;
1384 break;
1385 default:
1386 abort ();
1387 }
1388
1389 if (o->label)
1390 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1391 if (o->resume)
1392 {
1393 n->resume = map->insn_map[INSN_UID (o->resume)];
1394 if (n->resume == NULL)
1395 abort ();
1396 }
1397
1398 return n;
1399 }
1400
1401 static void
1402 duplicate_eh_region_2 (o, n_array)
1403 struct eh_region *o;
1404 struct eh_region **n_array;
1405 {
1406 struct eh_region *n = n_array[o->region_number];
1407
1408 switch (n->type)
1409 {
1410 case ERT_TRY:
1411 n->u.try.catch = n_array[o->u.try.catch->region_number];
1412 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1413 break;
1414
1415 case ERT_CATCH:
1416 if (o->u.catch.next_catch)
1417 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1418 if (o->u.catch.prev_catch)
1419 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1420 break;
1421
1422 default:
1423 break;
1424 }
1425
1426 if (o->outer)
1427 n->outer = n_array[o->outer->region_number];
1428 if (o->inner)
1429 n->inner = n_array[o->inner->region_number];
1430 if (o->next_peer)
1431 n->next_peer = n_array[o->next_peer->region_number];
1432 }
1433
1434 int
1435 duplicate_eh_regions (ifun, map)
1436 struct function *ifun;
1437 struct inline_remap *map;
1438 {
1439 int ifun_last_region_number = ifun->eh->last_region_number;
1440 struct eh_region **n_array, *root, *cur;
1441 int i;
1442
1443 if (ifun_last_region_number == 0)
1444 return 0;
1445
1446 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1447
1448 for (i = 1; i <= ifun_last_region_number; ++i)
1449 {
1450 cur = ifun->eh->region_array[i];
1451 if (!cur || cur->region_number != i)
1452 continue;
1453 n_array[i] = duplicate_eh_region_1 (cur, map);
1454 }
1455 for (i = 1; i <= ifun_last_region_number; ++i)
1456 {
1457 cur = ifun->eh->region_array[i];
1458 if (!cur || cur->region_number != i)
1459 continue;
1460 duplicate_eh_region_2 (cur, n_array);
1461 }
1462
1463 root = n_array[ifun->eh->region_tree->region_number];
1464 cur = cfun->eh->cur_region;
1465 if (cur)
1466 {
1467 struct eh_region *p = cur->inner;
1468 if (p)
1469 {
1470 while (p->next_peer)
1471 p = p->next_peer;
1472 p->next_peer = root;
1473 }
1474 else
1475 cur->inner = root;
1476
1477 for (i = 1; i <= ifun_last_region_number; ++i)
1478 if (n_array[i] && n_array[i]->outer == NULL)
1479 n_array[i]->outer = cur;
1480 }
1481 else
1482 {
1483 struct eh_region *p = cfun->eh->region_tree;
1484 if (p)
1485 {
1486 while (p->next_peer)
1487 p = p->next_peer;
1488 p->next_peer = root;
1489 }
1490 else
1491 cfun->eh->region_tree = root;
1492 }
1493
1494 free (n_array);
1495
1496 i = cfun->eh->last_region_number;
1497 cfun->eh->last_region_number = i + ifun_last_region_number;
1498 return i;
1499 }
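
/* E.g., if the current function already had 10 regions and IFUN had 3,
   the copies become regions 11 through 13 and the return value is 10:
   the bias the caller (presumably the function inliner) must apply to
   REG_EH_REGION notes copied over from IFUN.  */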
1500
1501 \f
1502 static int
1503 t2r_eq (pentry, pdata)
1504 const PTR pentry;
1505 const PTR pdata;
1506 {
1507 tree entry = (tree) pentry;
1508 tree data = (tree) pdata;
1509
1510 return TREE_PURPOSE (entry) == data;
1511 }
1512
1513 static hashval_t
1514 t2r_hash (pentry)
1515 const PTR pentry;
1516 {
1517 tree entry = (tree) pentry;
1518 return TYPE_HASH (TREE_PURPOSE (entry));
1519 }
1520
1521 static int
1522 t2r_mark_1 (slot, data)
1523 PTR *slot;
1524 PTR data ATTRIBUTE_UNUSED;
1525 {
1526 tree contents = (tree) *slot;
1527 ggc_mark_tree (contents);
1528 return 1;
1529 }
1530
1531 static void
1532 t2r_mark (addr)
1533 PTR addr;
1534 {
1535 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1536 }
1537
1538 static void
1539 add_type_for_runtime (type)
1540 tree type;
1541 {
1542 tree *slot;
1543
1544 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1545 TYPE_HASH (type), INSERT);
1546 if (*slot == NULL)
1547 {
1548 tree runtime = (*lang_eh_runtime_type) (type);
1549 *slot = tree_cons (type, runtime, NULL_TREE);
1550 }
1551 }
1552
1553 static tree
1554 lookup_type_for_runtime (type)
1555 tree type;
1556 {
1557 tree *slot;
1558
1559 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1560 TYPE_HASH (type), NO_INSERT);
1561
1562 /* We should always have inserted the data earlier. */
1563 return TREE_VALUE (*slot);
1564 }
1565
1566 \f
1567 /* Represent an entry in @TTypes for either catch actions
1568 or exception filter actions. */
1569 struct ttypes_filter
1570 {
1571 tree t;
1572 int filter;
1573 };
1574
1575 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1576 (a tree) for a @TTypes type node we are thinking about adding. */
1577
1578 static int
1579 ttypes_filter_eq (pentry, pdata)
1580 const PTR pentry;
1581 const PTR pdata;
1582 {
1583 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1584 tree data = (tree) pdata;
1585
1586 return entry->t == data;
1587 }
1588
1589 static hashval_t
1590 ttypes_filter_hash (pentry)
1591 const PTR pentry;
1592 {
1593 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1594 return TYPE_HASH (entry->t);
1595 }
1596
1597 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1598 exception specification list we are thinking about adding. */
1599 /* ??? Currently we use the type lists in the order given. Someone
1600 should put these in some canonical order. */
1601
1602 static int
1603 ehspec_filter_eq (pentry, pdata)
1604 const PTR pentry;
1605 const PTR pdata;
1606 {
1607 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1608 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1609
1610 return type_list_equal (entry->t, data->t);
1611 }
1612
1613 /* Hash function for exception specification lists. */
1614
1615 static hashval_t
1616 ehspec_filter_hash (pentry)
1617 const PTR pentry;
1618 {
1619 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1620 hashval_t h = 0;
1621 tree list;
1622
1623 for (list = entry->t; list ; list = TREE_CHAIN (list))
1624 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1625 return h;
1626 }
1627
1628 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1629 up the search. Return the filter value to be used. */
1630
1631 static int
1632 add_ttypes_entry (ttypes_hash, type)
1633 htab_t ttypes_hash;
1634 tree type;
1635 {
1636 struct ttypes_filter **slot, *n;
1637
1638 slot = (struct ttypes_filter **)
1639 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1640
1641 if ((n = *slot) == NULL)
1642 {
1643 /* Filter value is a 1-based table index. */
1644
1645 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1646 n->t = type;
1647 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1648 *slot = n;
1649
1650 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1651 }
1652
1653 return n->filter;
1654 }
1655
1656 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1657 to speed up the search. Return the filter value to be used. */
1658
1659 static int
1660 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1661 htab_t ehspec_hash;
1662 htab_t ttypes_hash;
1663 tree list;
1664 {
1665 struct ttypes_filter **slot, *n;
1666 struct ttypes_filter dummy;
1667
1668 dummy.t = list;
1669 slot = (struct ttypes_filter **)
1670 htab_find_slot (ehspec_hash, &dummy, INSERT);
1671
1672 if ((n = *slot) == NULL)
1673 {
1674 /* Filter value is a -1-based byte index into a uleb128 buffer. */
1675
1676 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1677 n->t = list;
1678 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1679 *slot = n;
1680
1681 /* Look up each type in the list and encode its filter
1682 value as a uleb128. Terminate the list with 0. */
1683 for (; list ; list = TREE_CHAIN (list))
1684 push_uleb128 (&cfun->eh->ehspec_data,
1685 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1686 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1687 }
1688
1689 return n->filter;
1690 }
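
/* For reference, uleb128 is the standard DWARF variable-length
   unsigned encoding; push_uleb128 (declared above, defined later in
   this file) presumably appends bytes along these lines (a sketch,
   not the actual definition):

	do {
	  byte = value & 0x7f;
	  value >>= 7;
	  if (value)
	    byte |= 0x80;	(more bytes follow)
	  VARRAY_PUSH_UCHAR (*data_area, byte);
	} while (value);  */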
1691
1692 /* Generate the action filter values to be used for CATCH and
1693 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1694 we use lots of landing pads, and so every type or list can share
1695 the same filter value, which saves table space. */
1696
1697 static void
1698 assign_filter_values ()
1699 {
1700 int i;
1701 htab_t ttypes, ehspec;
1702
1703 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1704 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1705
1706 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1707 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1708
1709 for (i = cfun->eh->last_region_number; i > 0; --i)
1710 {
1711 struct eh_region *r = cfun->eh->region_array[i];
1712
1713 /* Mind we don't process a region more than once. */
1714 if (!r || r->region_number != i)
1715 continue;
1716
1717 switch (r->type)
1718 {
1719 case ERT_CATCH:
1720 /* Whatever type_list is (NULL or true list), we build a list
1721 of filters for the region. */
1722 r->u.catch.filter_list = NULL_TREE;
1723
1724 if (r->u.catch.type_list != NULL)
1725 {
1726 /* Get a filter value for each of the types caught and store
1727 them in the region's dedicated list. */
1728 tree tp_node = r->u.catch.type_list;
1729
1730 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1731 {
1732 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1733 tree flt_node = build_int_2 (flt, 0);
1734
1735 r->u.catch.filter_list
1736 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1737 }
1738 }
1739 else
1740 {
1741 /* Get a filter value for the NULL list also since it will need
1742 an action record anyway. */
1743 int flt = add_ttypes_entry (ttypes, NULL);
1744 tree flt_node = build_int_2 (flt, 0);
1745
1746 r->u.catch.filter_list
1747 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1748 }
1749
1750 break;
1751
1752 case ERT_ALLOWED_EXCEPTIONS:
1753 r->u.allowed.filter
1754 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 }
1761
1762 htab_delete (ttypes);
1763 htab_delete (ehspec);
1764 }
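
/* Note the sharing this buys: e.g., two catch regions that both name
   type A receive the same filter value for A, because add_ttypes_entry
   finds the existing entry through the TTYPES hash, and the @TTypes
   table itself gets only one copy of A.  */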
1765
1766 static void
1767 build_post_landing_pads ()
1768 {
1769 int i;
1770
1771 for (i = cfun->eh->last_region_number; i > 0; --i)
1772 {
1773 struct eh_region *region = cfun->eh->region_array[i];
1774 rtx seq;
1775
1776 /* Mind we don't process a region more than once. */
1777 if (!region || region->region_number != i)
1778 continue;
1779
1780 switch (region->type)
1781 {
1782 case ERT_TRY:
1783 /* ??? Collect the set of all non-overlapping catch handlers
1784 all the way up the chain until blocked by a cleanup. */
1785 /* ??? Outer try regions can share landing pads with inner
1786 try regions if the types are completely non-overlapping,
1787 and there are no intervening cleanups. */
1788
1789 region->post_landing_pad = gen_label_rtx ();
1790
1791 start_sequence ();
1792
1793 emit_label (region->post_landing_pad);
1794
1795 /* ??? It is mighty inconvenient to call back into the
1796 switch statement generation code in expand_end_case.
1797 Rapid prototyping sez a sequence of ifs. */
1798 {
1799 struct eh_region *c;
1800 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1801 {
1802 /* ??? _Unwind_ForcedUnwind wants no match here. */
1803 if (c->u.catch.type_list == NULL)
1804 emit_jump (c->label);
1805 else
1806 {
1807 /* We need one cmp/jump per type caught. Each type
1808 list entry has a matching entry in the filter list
1809 (see assign_filter_values). */
1810 tree tp_node = c->u.catch.type_list;
1811 tree flt_node = c->u.catch.filter_list;
1812
1813 for (; tp_node; )
1814 {
1815 emit_cmp_and_jump_insns
1816 (cfun->eh->filter,
1817 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1818 EQ, NULL_RTX, word_mode, 0, c->label);
1819
1820 tp_node = TREE_CHAIN (tp_node);
1821 flt_node = TREE_CHAIN (flt_node);
1822 }
1823 }
1824 }
1825 }
1826
1827 /* We delay the generation of the _Unwind_Resume until we generate
1828 landing pads. We emit a marker here so as to get good control
1829 flow data in the meantime. */
1830 region->resume
1831 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1832 emit_barrier ();
1833
1834 seq = get_insns ();
1835 end_sequence ();
1836
1837 emit_insns_before (seq, region->u.try.catch->label);
1838 break;
1839
1840 case ERT_ALLOWED_EXCEPTIONS:
1841 region->post_landing_pad = gen_label_rtx ();
1842
1843 start_sequence ();
1844
1845 emit_label (region->post_landing_pad);
1846
1847 emit_cmp_and_jump_insns (cfun->eh->filter,
1848 GEN_INT (region->u.allowed.filter),
1849 EQ, NULL_RTX, word_mode, 0, region->label);
1850
1851 /* We delay the generation of the _Unwind_Resume until we generate
1852 landing pads. We emit a marker here so as to get good control
1853 flow data in the meantime. */
1854 region->resume
1855 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1856 emit_barrier ();
1857
1858 seq = get_insns ();
1859 end_sequence ();
1860
1861 emit_insns_before (seq, region->label);
1862 break;
1863
1864 case ERT_CLEANUP:
1865 case ERT_MUST_NOT_THROW:
1866 region->post_landing_pad = region->label;
1867 break;
1868
1869 case ERT_CATCH:
1870 case ERT_THROW:
1871 /* Nothing to do. */
1872 break;
1873
1874 default:
1875 abort ();
1876 }
1877 }
1878 }
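
/* So, sketching the result for a TRY region with two catches, the
   sequence emitted just before the first catch label looks like:

	post_landing_pad:
	  if (filter == F1) goto catch1_label;
	  if (filter == F2) goto catch2_label;
	  resx;		(resume unwinding in an outer region)

   where F1 and F2 are the filter values assigned above.  */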
1879
1880 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1881 _Unwind_Resume otherwise. */
1882
1883 static void
1884 connect_post_landing_pads ()
1885 {
1886 int i;
1887
1888 for (i = cfun->eh->last_region_number; i > 0; --i)
1889 {
1890 struct eh_region *region = cfun->eh->region_array[i];
1891 struct eh_region *outer;
1892 rtx seq;
1893
1894 /* Mind we don't process a region more than once. */
1895 if (!region || region->region_number != i)
1896 continue;
1897
1898 /* If there is no RESX, or it has been deleted by flow, there's
1899 nothing to fix up. */
1900 if (! region->resume || INSN_DELETED_P (region->resume))
1901 continue;
1902
1903 /* Search for another landing pad in this function. */
1904 for (outer = region->outer; outer ; outer = outer->outer)
1905 if (outer->post_landing_pad)
1906 break;
1907
1908 start_sequence ();
1909
1910 if (outer)
1911 emit_jump (outer->post_landing_pad);
1912 else
1913 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1914 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1915
1916 seq = get_insns ();
1917 end_sequence ();
1918 emit_insns_before (seq, region->resume);
1919 delete_insn (region->resume);
1920 }
1921 }
1922
1923 \f
1924 static void
1925 dw2_build_landing_pads ()
1926 {
1927 int i;
1928 unsigned int j;
1929
1930 for (i = cfun->eh->last_region_number; i > 0; --i)
1931 {
1932 struct eh_region *region = cfun->eh->region_array[i];
1933 rtx seq;
1934 bool clobbers_hard_regs = false;
1935
1936 /* Mind we don't process a region more than once. */
1937 if (!region || region->region_number != i)
1938 continue;
1939
1940 if (region->type != ERT_CLEANUP
1941 && region->type != ERT_TRY
1942 && region->type != ERT_ALLOWED_EXCEPTIONS)
1943 continue;
1944
1945 start_sequence ();
1946
1947 region->landing_pad = gen_label_rtx ();
1948 emit_label (region->landing_pad);
1949
1950 #ifdef HAVE_exception_receiver
1951 if (HAVE_exception_receiver)
1952 emit_insn (gen_exception_receiver ());
1953 else
1954 #endif
1955 #ifdef HAVE_nonlocal_goto_receiver
1956 if (HAVE_nonlocal_goto_receiver)
1957 emit_insn (gen_nonlocal_goto_receiver ());
1958 else
1959 #endif
1960 { /* Nothing */ }
1961
1962 /* If the eh_return data registers are call-saved, then we
1963 won't have considered them clobbered from the call that
1964 threw. Kill them now. */
1965 for (j = 0; ; ++j)
1966 {
1967 unsigned r = EH_RETURN_DATA_REGNO (j);
1968 if (r == INVALID_REGNUM)
1969 break;
1970 if (! call_used_regs[r])
1971 {
1972 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1973 clobbers_hard_regs = true;
1974 }
1975 }
1976
1977 if (clobbers_hard_regs)
1978 {
1979 /* @@@ This is a kludge. Not all machine descriptions define a
1980 blockage insn, but we must not allow the code we just generated
1981 to be reordered by scheduling. So emit an ASM_INPUT to act as
1982 blockage insn. */
1983 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1984 }
1985
1986 emit_move_insn (cfun->eh->exc_ptr,
1987 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1988 emit_move_insn (cfun->eh->filter,
1989 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1990
1991 seq = get_insns ();
1992 end_sequence ();
1993
1994 emit_insns_before (seq, region->post_landing_pad);
1995 }
1996 }
1997
1998 \f
1999 struct sjlj_lp_info
2000 {
2001 int directly_reachable;
2002 int action_index;
2003 int dispatch_index;
2004 int call_site_index;
2005 };
2006
2007 static bool
2008 sjlj_find_directly_reachable_regions (lp_info)
2009 struct sjlj_lp_info *lp_info;
2010 {
2011 rtx insn;
2012 bool found_one = false;
2013
2014 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2015 {
2016 struct eh_region *region;
2017 tree type_thrown;
2018 rtx note;
2019
2020 if (! INSN_P (insn))
2021 continue;
2022
2023 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2024 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2025 continue;
2026
2027 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2028
2029 type_thrown = NULL_TREE;
2030 if (region->type == ERT_THROW)
2031 {
2032 type_thrown = region->u.throw.type;
2033 region = region->outer;
2034 }
2035
2036 /* Find the first containing region that might handle the exception.
2037 That's the landing pad to which we will transfer control. */
2038 for (; region; region = region->outer)
2039 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
2040 break;
2041
2042 if (region)
2043 {
2044 lp_info[region->region_number].directly_reachable = 1;
2045 found_one = true;
2046 }
2047 }
2048
2049 return found_one;
2050 }
2051
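/* Fill in LP_INFO for each directly reachable region: build the action
   record table, assign dispatch indices to the regions that require
   in-function processing, and compute the call-site values that
   sjlj_mark_call_sites will store before each throwing insn.  */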
2052 static void
2053 sjlj_assign_call_site_values (dispatch_label, lp_info)
2054 rtx dispatch_label;
2055 struct sjlj_lp_info *lp_info;
2056 {
2057 htab_t ar_hash;
2058 int i, index;
2059
2060 /* First task: build the action table. */
2061
2062 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2063 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2064
2065 for (i = cfun->eh->last_region_number; i > 0; --i)
2066 if (lp_info[i].directly_reachable)
2067 {
2068 struct eh_region *r = cfun->eh->region_array[i];
2069 r->landing_pad = dispatch_label;
2070 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2071 if (lp_info[i].action_index != -1)
2072 cfun->uses_eh_lsda = 1;
2073 }
2074
2075 htab_delete (ar_hash);
2076
2077 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2078 landing pad label for the region. For sjlj though, there is one
2079 common landing pad from which we dispatch to the post-landing pads.
2080
2081 A region receives a dispatch index if it is directly reachable
2082 and requires in-function processing. Regions that share post-landing
2083 pads may share dispatch indices. */
2084 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2085 (see build_post_landing_pads) so we don't bother checking for it. */
2086
2087 index = 0;
2088 for (i = cfun->eh->last_region_number; i > 0; --i)
2089 if (lp_info[i].directly_reachable
2090 && lp_info[i].action_index >= 0)
2091 lp_info[i].dispatch_index = index++;
2092
2093 /* Finally: assign call-site values. In dwarf2 terms, this would be
2094 the region number assigned by convert_to_eh_region_ranges, but
2095 handles no-action and must-not-throw differently. */
2096
2097 call_site_base = 1;
2098 for (i = cfun->eh->last_region_number; i > 0; --i)
2099 if (lp_info[i].directly_reachable)
2100 {
2101 int action = lp_info[i].action_index;
2102
2103 /* Map must-not-throw to otherwise unused call-site index 0. */
2104 if (action == -2)
2105 index = 0;
2106 /* Map no-action to otherwise unused call-site index -1. */
2107 else if (action == -1)
2108 index = -1;
2109 /* Otherwise, look it up in the table. */
2110 else
2111 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2112
2113 lp_info[i].call_site_index = index;
2114 }
2115 }
2116
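/* Before each insn that can throw, store that insn's call-site value
   into the function context.  Stores that would be redundant within
   an extended basic block are elided.  */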
2117 static void
2118 sjlj_mark_call_sites (lp_info)
2119 struct sjlj_lp_info *lp_info;
2120 {
2121 int last_call_site = -2;
2122 rtx insn, mem;
2123
2124 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2125 {
2126 struct eh_region *region;
2127 int this_call_site;
2128 rtx note, before, p;
2129
2130 /* Reset value tracking at extended basic block boundaries. */
2131 if (GET_CODE (insn) == CODE_LABEL)
2132 last_call_site = -2;
2133
2134 if (! INSN_P (insn))
2135 continue;
2136
2137 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2138 if (!note)
2139 {
2140 /* Calls (and trapping insns) without notes are outside any
2141 exception handling region in this function. Mark them as
2142 no action. */
2143 if (GET_CODE (insn) == CALL_INSN
2144 || (flag_non_call_exceptions
2145 && may_trap_p (PATTERN (insn))))
2146 this_call_site = -1;
2147 else
2148 continue;
2149 }
2150 else
2151 {
2152 /* Calls that are known not to throw need not be marked. */
2153 if (INTVAL (XEXP (note, 0)) <= 0)
2154 continue;
2155
2156 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2157 this_call_site = lp_info[region->region_number].call_site_index;
2158 }
2159
2160 if (this_call_site == last_call_site)
2161 continue;
2162
2163 /* Don't separate a call from its argument loads. */
2164 before = insn;
2165 if (GET_CODE (insn) == CALL_INSN)
2166 before = find_first_parameter_load (insn, NULL_RTX);
2167
2168 start_sequence ();
2169 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2170 sjlj_fc_call_site_ofs);
2171 emit_move_insn (mem, GEN_INT (this_call_site));
2172 p = get_insns ();
2173 end_sequence ();
2174
2175 emit_insns_before (p, before);
2176 last_call_site = this_call_site;
2177 }
2178 }
2179
2180 /* Construct the SjLj_Function_Context. */
2181
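/* A rough sketch of the runtime structure being filled in here; the
   authoritative definition is struct SjLj_Function_Context in
   unwind-sjlj.c, and only the sjlj_fc_*_ofs offsets are relied upon
   by the code below:

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;	   runtime chain
	  int call_site;			   sjlj_fc_call_site_ofs
	  _Unwind_Word data[4];			   sjlj_fc_data_ofs
	  _Unwind_Personality_Fn personality;	   sjlj_fc_personality_ofs
	  void *lsda;				   sjlj_fc_lsda_ofs
	  ... jmp_buf-like save area ...	   sjlj_fc_jbuf_ofs
	};  */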
2182 static void
2183 sjlj_emit_function_enter (dispatch_label)
2184 rtx dispatch_label;
2185 {
2186 rtx fn_begin, fc, mem, seq;
2187
2188 fc = cfun->eh->sjlj_fc;
2189
2190 start_sequence ();
2191
2192 /* We're storing this libcall's address into memory instead of
2193 calling it directly. Thus, we must call assemble_external_libcall
2194 here, as we cannot depend on emit_library_call to do it for us. */
2195 assemble_external_libcall (eh_personality_libfunc);
2196 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2197 emit_move_insn (mem, eh_personality_libfunc);
2198
2199 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2200 if (cfun->uses_eh_lsda)
2201 {
2202 char buf[20];
2203 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2204 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2205 }
2206 else
2207 emit_move_insn (mem, const0_rtx);
2208
2209 #ifdef DONT_USE_BUILTIN_SETJMP
2210 {
2211 rtx x, note;
2212 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2213 TYPE_MODE (integer_type_node), 1,
2214 plus_constant (XEXP (fc, 0),
2215 sjlj_fc_jbuf_ofs), Pmode);
2216
2217 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2218 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2219
2220 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2221 TYPE_MODE (integer_type_node), 0, dispatch_label);
2222 }
2223 #else
2224 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2225 dispatch_label);
2226 #endif
2227
2228 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2229 1, XEXP (fc, 0), Pmode);
2230
2231 seq = get_insns ();
2232 end_sequence ();
2233
2234 /* ??? Instead of doing this at the beginning of the function,
2235 do this in a block that is at loop level 0 and dominates all
2236 can_throw_internal instructions. */
2237
2238 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2239 if (GET_CODE (fn_begin) == NOTE
2240 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2241 break;
2242 emit_insns_after (seq, fn_begin);
2243 }
2244
2245 /* Call back from expand_function_end to know where we should put
2246 the call to unwind_sjlj_unregister_libfunc if needed. */
2247
2248 void
2249 sjlj_emit_function_exit_after (after)
2250 rtx after;
2251 {
2252 cfun->eh->sjlj_exit_after = after;
2253 }
2254
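/* Unregister the function context on exit from the function.  The
   sequence is placed at the point previously recorded by
   sjlj_emit_function_exit_after.  */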
2255 static void
2256 sjlj_emit_function_exit ()
2257 {
2258 rtx seq;
2259
2260 start_sequence ();
2261
2262 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2263 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2264
2265 seq = get_insns ();
2266 end_sequence ();
2267
2268 /* ??? Really this can be done in any block at loop level 0 that
2269 post-dominates all can_throw_internal instructions. This is
2270 the last possible moment. */
2271
2272 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2273 }
2274
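/* Emit the one landing pad used for sjlj unwinding: the dispatch label
   at which the runtime lands, code that reloads the call-site value,
   exception pointer and filter from the function context, and a chain
   of compare-and-branch insns transferring control to the matching
   region's post-landing pad.  */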
2275 static void
2276 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2277 rtx dispatch_label;
2278 struct sjlj_lp_info *lp_info;
2279 {
2280 int i, first_reachable;
2281 rtx mem, dispatch, seq, fc;
2282
2283 fc = cfun->eh->sjlj_fc;
2284
2285 start_sequence ();
2286
2287 emit_label (dispatch_label);
2288
2289 #ifndef DONT_USE_BUILTIN_SETJMP
2290 expand_builtin_setjmp_receiver (dispatch_label);
2291 #endif
2292
2293 /* Load up dispatch index, exc_ptr and filter values from the
2294 function context. */
2295 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2296 sjlj_fc_call_site_ofs);
2297 dispatch = copy_to_reg (mem);
2298
2299 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2300 if (word_mode != Pmode)
2301 {
2302 #ifdef POINTERS_EXTEND_UNSIGNED
2303 mem = convert_memory_address (Pmode, mem);
2304 #else
2305 mem = convert_to_mode (Pmode, mem, 0);
2306 #endif
2307 }
2308 emit_move_insn (cfun->eh->exc_ptr, mem);
2309
2310 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2311 emit_move_insn (cfun->eh->filter, mem);
2312
2313 /* Jump to one of the directly reachable regions. */
2314 /* ??? This really ought to be using a switch statement. */
2315
2316 first_reachable = 0;
2317 for (i = cfun->eh->last_region_number; i > 0; --i)
2318 {
2319 if (! lp_info[i].directly_reachable
2320 || lp_info[i].action_index < 0)
2321 continue;
2322
2323 if (! first_reachable)
2324 {
2325 first_reachable = i;
2326 continue;
2327 }
2328
2329 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2330 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2331 cfun->eh->region_array[i]->post_landing_pad);
2332 }
2333
2334 seq = get_insns ();
2335 end_sequence ();
2336
2337 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2338 ->post_landing_pad));
2339 }
2340
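/* Entry point for sjlj landing pad generation.  If any region is
   directly reachable, allocate the function context on the stack,
   assign call-site values, and emit the entry, dispatch and exit
   code.  */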
2341 static void
2342 sjlj_build_landing_pads ()
2343 {
2344 struct sjlj_lp_info *lp_info;
2345
2346 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2347 sizeof (struct sjlj_lp_info));
2348
2349 if (sjlj_find_directly_reachable_regions (lp_info))
2350 {
2351 rtx dispatch_label = gen_label_rtx ();
2352
2353 cfun->eh->sjlj_fc
2354 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2355 int_size_in_bytes (sjlj_fc_type_node),
2356 TYPE_ALIGN (sjlj_fc_type_node));
2357
2358 sjlj_assign_call_site_values (dispatch_label, lp_info);
2359 sjlj_mark_call_sites (lp_info);
2360
2361 sjlj_emit_function_enter (dispatch_label);
2362 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2363 sjlj_emit_function_exit ();
2364 }
2365
2366 free (lp_info);
2367 }
2368
2369 void
2370 finish_eh_generation ()
2371 {
2372 /* Nothing to do if no regions created. */
2373 if (cfun->eh->region_tree == NULL)
2374 return;
2375
2376 /* The object here is to provide find_basic_blocks with detailed
2377 information (via reachable_handlers) on how exception control
2378 flows within the function. In this first pass, we can include
2379 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2380 regions, and hope that it will be useful in deleting unreachable
2381 handlers. Subsequently, we will generate landing pads which will
2382 connect many of the handlers, and then type information will not
2383 be effective. Still, this is a win over previous implementations. */
2384
2385 rebuild_jump_labels (get_insns ());
2386 find_basic_blocks (get_insns (), max_reg_num (), 0);
2387 cleanup_cfg (CLEANUP_PRE_LOOP);
2388
2389 /* These registers are used by the landing pads. Make sure they
2390 have been generated. */
2391 get_exception_pointer (cfun);
2392 get_exception_filter (cfun);
2393
2394 /* Construct the landing pads. */
2395
2396 assign_filter_values ();
2397 build_post_landing_pads ();
2398 connect_post_landing_pads ();
2399 if (USING_SJLJ_EXCEPTIONS)
2400 sjlj_build_landing_pads ();
2401 else
2402 dw2_build_landing_pads ();
2403
2404 cfun->eh->built_landing_pads = 1;
2405
2406 /* We've totally changed the CFG. Start over. */
2407 find_exception_handler_labels ();
2408 rebuild_jump_labels (get_insns ());
2409 find_basic_blocks (get_insns (), max_reg_num (), 0);
2410 cleanup_cfg (CLEANUP_PRE_LOOP);
2411 }
2412 \f
2413 /* This section handles removing dead code for flow. */
2414
2415 /* Remove LABEL from the exception_handler_labels list. */
2416
2417 static void
2418 remove_exception_handler_label (label)
2419 rtx label;
2420 {
2421 rtx *pl, l;
2422
2423 for (pl = &exception_handler_labels, l = *pl;
2424 XEXP (l, 0) != label;
2425 pl = &XEXP (l, 1), l = *pl)
2426 continue;
2427
2428 *pl = XEXP (l, 1);
2429 free_EXPR_LIST_node (l);
2430 }
2431
2432 /* Splice REGION from the region tree etc. */
2433
2434 static void
2435 remove_eh_handler (region)
2436 struct eh_region *region;
2437 {
2438 struct eh_region **pp, *p;
2439 rtx lab;
2440 int i;
2441
2442 /* For the benefit of efficiently handling REG_EH_REGION notes,
2443 replace this region in the region array with its containing
2444 region. Note that previous region deletions may result in
2445 multiple copies of this region in the array, so we have to
2446 search the whole thing. */
2447 for (i = cfun->eh->last_region_number; i > 0; --i)
2448 if (cfun->eh->region_array[i] == region)
2449 cfun->eh->region_array[i] = region->outer;
2450
2451 if (cfun->eh->built_landing_pads)
2452 lab = region->landing_pad;
2453 else
2454 lab = region->label;
2455 if (lab)
2456 remove_exception_handler_label (lab);
2457
2458 if (region->outer)
2459 pp = &region->outer->inner;
2460 else
2461 pp = &cfun->eh->region_tree;
2462 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2463 continue;
2464
2465 if (region->inner)
2466 {
2467 for (p = region->inner; p->next_peer ; p = p->next_peer)
2468 p->outer = region->outer;
2469 p->next_peer = region->next_peer;
2470 p->outer = region->outer;
2471 *pp = region->inner;
2472 }
2473 else
2474 *pp = region->next_peer;
2475
2476 if (region->type == ERT_CATCH)
2477 {
2478 struct eh_region *try, *next, *prev;
2479
2480 for (try = region->next_peer;
2481 try->type == ERT_CATCH;
2482 try = try->next_peer)
2483 continue;
2484 if (try->type != ERT_TRY)
2485 abort ();
2486
2487 next = region->u.catch.next_catch;
2488 prev = region->u.catch.prev_catch;
2489
2490 if (next)
2491 next->u.catch.prev_catch = prev;
2492 else
2493 try->u.try.last_catch = prev;
2494 if (prev)
2495 prev->u.catch.next_catch = next;
2496 else
2497 {
2498 try->u.try.catch = next;
2499 if (! next)
2500 remove_eh_handler (try);
2501 }
2502 }
2503
2504 free (region);
2505 }
2506
2507 /* LABEL heads a basic block that is about to be deleted. If this
2508 label corresponds to an exception region, we may be able to
2509 delete the region. */
2510
2511 void
2512 maybe_remove_eh_handler (label)
2513 rtx label;
2514 {
2515 int i;
2516
2517 /* ??? After generating landing pads, it's not so simple to determine
2518 if the region data is completely unused. One must examine the
2519 landing pad and the post landing pad, and whether an inner try block
2520 is referencing the catch handlers directly. */
2521 if (cfun->eh->built_landing_pads)
2522 return;
2523
2524 for (i = cfun->eh->last_region_number; i > 0; --i)
2525 {
2526 struct eh_region *region = cfun->eh->region_array[i];
2527 if (region && region->label == label)
2528 {
2529 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2530 because there is no path to the fallback call to terminate.
2531 But the region continues to affect call-site data until there
2532 are no more contained calls, which we don't see here. */
2533 if (region->type == ERT_MUST_NOT_THROW)
2534 {
2535 remove_exception_handler_label (region->label);
2536 region->label = NULL_RTX;
2537 }
2538 else
2539 remove_eh_handler (region);
2540 break;
2541 }
2542 }
2543 }
2544
2545 \f
2546 /* This section describes CFG exception edges for flow. */
2547
2548 /* For communicating between calls to reachable_next_level. */
2549 struct reachable_info
2550 {
2551 tree types_caught;	/* Types already known caught by inner handlers.  */
2552 tree types_allowed;	/* List of allowed-exception type lists seen.  */
2553 rtx handlers;	/* INSN_LIST of reachable handler labels.  */
2554 };
2555
2556 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2557 base class of TYPE, is in HANDLED. */
2558
2559 static int
2560 check_handled (handled, type)
2561 tree handled, type;
2562 {
2563 tree t;
2564
2565 /* We can check for exact matches without front-end help. */
2566 if (! lang_eh_type_covers)
2567 {
2568 for (t = handled; t ; t = TREE_CHAIN (t))
2569 if (TREE_VALUE (t) == type)
2570 return 1;
2571 }
2572 else
2573 {
2574 for (t = handled; t ; t = TREE_CHAIN (t))
2575 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2576 return 1;
2577 }
2578
2579 return 0;
2580 }
2581
2582 /* A subroutine of reachable_next_level. If we are collecting a list
2583 of handlers, add one. After landing pad generation, we reference
2584 the landing pad instead of the handlers themselves. Further, the handlers are
2585 all wired together, so by referencing one, we've got them all.
2586 Before landing pad generation we reference each handler individually.
2587
2588 LP_REGION contains the landing pad; REGION is the handler. */
2589
2590 static void
2591 add_reachable_handler (info, lp_region, region)
2592 struct reachable_info *info;
2593 struct eh_region *lp_region;
2594 struct eh_region *region;
2595 {
2596 if (! info)
2597 return;
2598
2599 if (cfun->eh->built_landing_pads)
2600 {
2601 if (! info->handlers)
2602 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2603 }
2604 else
2605 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2606 }
2607
2608 /* Process one level of exception regions for reachability.
2609 If TYPE_THROWN is non-null, then it is the *exact* type being
2610 propagated. If INFO is non-null, then collect handler labels
2611 and caught/allowed type information between invocations. */
2612
2613 static enum reachable_code
2614 reachable_next_level (region, type_thrown, info)
2615 struct eh_region *region;
2616 tree type_thrown;
2617 struct reachable_info *info;
2618 {
2619 switch (region->type)
2620 {
2621 case ERT_CLEANUP:
2622 /* Before landing-pad generation, we model control flow
2623 directly to the individual handlers. In this way we can
2624 see that catch handler types may shadow one another. */
2625 add_reachable_handler (info, region, region);
2626 return RNL_MAYBE_CAUGHT;
2627
2628 case ERT_TRY:
2629 {
2630 struct eh_region *c;
2631 enum reachable_code ret = RNL_NOT_CAUGHT;
2632
2633 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2634 {
2635 /* A catch-all handler ends the search. */
2636 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2637 to be run as well. */
2638 if (c->u.catch.type_list == NULL)
2639 {
2640 add_reachable_handler (info, region, c);
2641 return RNL_CAUGHT;
2642 }
2643
2644 if (type_thrown)
2645 {
2646 /* If we have at least one type match, end the search. */
2647 tree tp_node = c->u.catch.type_list;
2648
2649 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2650 {
2651 tree type = TREE_VALUE (tp_node);
2652
2653 if (type == type_thrown
2654 || (lang_eh_type_covers
2655 && (*lang_eh_type_covers) (type, type_thrown)))
2656 {
2657 add_reachable_handler (info, region, c);
2658 return RNL_CAUGHT;
2659 }
2660 }
2661
2662 /* If we have definitive information about a match failure,
2663 the catch won't trigger. */
2664 if (lang_eh_type_covers)
2665 return RNL_NOT_CAUGHT;
2666 }
2667
2668 /* At this point, we either don't know what type is thrown or
2669 don't have front-end assistance to help deciding if it is
2670 covered by one of the types in the list for this region.
2671
2672 We'd then like to add this region to the list of reachable
2673 handlers since it is indeed potentially reachable based on the
2674 information we have.
2675
2676 Actually, this handler is for sure not reachable if all the
2677 types it matches have already been caught. That is, it is only
2678 potentially reachable if at least one of the types it catches
2679 has not been previously caught. */
2680
2681 if (! info)
2682 ret = RNL_MAYBE_CAUGHT;
2683 else
2684 {
2685 tree tp_node = c->u.catch.type_list;
2686 bool maybe_reachable = false;
2687
2688 /* Compute the potential reachability of this handler and
2689 update the list of types caught at the same time. */
2690 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2691 {
2692 tree type = TREE_VALUE (tp_node);
2693
2694 if (! check_handled (info->types_caught, type))
2695 {
2696 info->types_caught
2697 = tree_cons (NULL, type, info->types_caught);
2698
2699 maybe_reachable = true;
2700 }
2701 }
2702
2703 if (maybe_reachable)
2704 {
2705 add_reachable_handler (info, region, c);
2706
2707 /* ??? If the catch type is a base class of every allowed
2708 type, then we know we can stop the search. */
2709 ret = RNL_MAYBE_CAUGHT;
2710 }
2711 }
2712 }
2713
2714 return ret;
2715 }
2716
2717 case ERT_ALLOWED_EXCEPTIONS:
2718 /* An empty list of types definitely ends the search. */
2719 if (region->u.allowed.type_list == NULL_TREE)
2720 {
2721 add_reachable_handler (info, region, region);
2722 return RNL_CAUGHT;
2723 }
2724
2725 /* Collect a list of lists of allowed types for use in detecting
2726 when a catch may be transformed into a catch-all. */
2727 if (info)
2728 info->types_allowed = tree_cons (NULL_TREE,
2729 region->u.allowed.type_list,
2730 info->types_allowed);
2731
2732 /* If we have definitive information about the type hierarchy,
2733 then we can tell if the thrown type will pass through the
2734 filter. */
2735 if (type_thrown && lang_eh_type_covers)
2736 {
2737 if (check_handled (region->u.allowed.type_list, type_thrown))
2738 return RNL_NOT_CAUGHT;
2739 else
2740 {
2741 add_reachable_handler (info, region, region);
2742 return RNL_CAUGHT;
2743 }
2744 }
2745
2746 add_reachable_handler (info, region, region);
2747 return RNL_MAYBE_CAUGHT;
2748
2749 case ERT_CATCH:
2750 /* Catch regions are handled by their controlling try region. */
2751 return RNL_NOT_CAUGHT;
2752
2753 case ERT_MUST_NOT_THROW:
2754 /* Here we end our search, since no exceptions may propagate.
2755 If we've touched down at some landing pad previously, then the
2756 explicit function call we generated may be used. Otherwise
2757 the call is made by the runtime. */
2758 if (info && info->handlers)
2759 {
2760 add_reachable_handler (info, region, region);
2761 return RNL_CAUGHT;
2762 }
2763 else
2764 return RNL_BLOCKED;
2765
2766 case ERT_THROW:
2767 case ERT_FIXUP:
2768 /* Shouldn't see these here. */
2769 break;
2770 }
2771
2772 abort ();
2773 }
2774
2775 /* Retrieve a list of labels of exception handlers which can be
2776 reached by a given insn. */
2777
2778 rtx
2779 reachable_handlers (insn)
2780 rtx insn;
2781 {
2782 struct reachable_info info;
2783 struct eh_region *region;
2784 tree type_thrown;
2785 int region_number;
2786
2787 if (GET_CODE (insn) == JUMP_INSN
2788 && GET_CODE (PATTERN (insn)) == RESX)
2789 region_number = XINT (PATTERN (insn), 0);
2790 else
2791 {
2792 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2793 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2794 return NULL;
2795 region_number = INTVAL (XEXP (note, 0));
2796 }
2797
2798 memset (&info, 0, sizeof (info));
2799
2800 region = cfun->eh->region_array[region_number];
2801
2802 type_thrown = NULL_TREE;
2803 if (GET_CODE (insn) == JUMP_INSN
2804 && GET_CODE (PATTERN (insn)) == RESX)
2805 {
2806 /* A RESX leaves a region instead of entering it. Thus the
2807 region itself may have been deleted out from under us. */
2808 if (region == NULL)
2809 return NULL;
2810 region = region->outer;
2811 }
2812 else if (region->type == ERT_THROW)
2813 {
2814 type_thrown = region->u.throw.type;
2815 region = region->outer;
2816 }
2817
2818 for (; region; region = region->outer)
2819 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2820 break;
2821
2822 return info.handlers;
2823 }
2824
2825 /* Determine if the given INSN can throw an exception that is caught
2826 within the function. */
2827
2828 bool
2829 can_throw_internal (insn)
2830 rtx insn;
2831 {
2832 struct eh_region *region;
2833 tree type_thrown;
2834 rtx note;
2835
2836 if (! INSN_P (insn))
2837 return false;
2838
2839 if (GET_CODE (insn) == INSN
2840 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2841 insn = XVECEXP (PATTERN (insn), 0, 0);
2842
2843 if (GET_CODE (insn) == CALL_INSN
2844 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2845 {
2846 int i;
2847 for (i = 0; i < 3; ++i)
2848 {
2849 rtx sub = XEXP (PATTERN (insn), i);
2850 for (; sub ; sub = NEXT_INSN (sub))
2851 if (can_throw_internal (sub))
2852 return true;
2853 }
2854 return false;
2855 }
2856
2857 /* Every insn that might throw has an EH_REGION note. */
2858 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2859 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2860 return false;
2861
2862 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2863
2864 type_thrown = NULL_TREE;
2865 if (region->type == ERT_THROW)
2866 {
2867 type_thrown = region->u.throw.type;
2868 region = region->outer;
2869 }
2870
2871 /* If this exception is ignored by each and every containing region,
2872 then control passes straight out. The runtime may handle some
2873 regions, which also do not require processing internally. */
2874 for (; region; region = region->outer)
2875 {
2876 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2877 if (how == RNL_BLOCKED)
2878 return false;
2879 if (how != RNL_NOT_CAUGHT)
2880 return true;
2881 }
2882
2883 return false;
2884 }
2885
2886 /* Determine if the given INSN can throw an exception that is
2887 visible outside the function. */
2888
2889 bool
2890 can_throw_external (insn)
2891 rtx insn;
2892 {
2893 struct eh_region *region;
2894 tree type_thrown;
2895 rtx note;
2896
2897 if (! INSN_P (insn))
2898 return false;
2899
2900 if (GET_CODE (insn) == INSN
2901 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2902 insn = XVECEXP (PATTERN (insn), 0, 0);
2903
2904 if (GET_CODE (insn) == CALL_INSN
2905 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2906 {
2907 int i;
2908 for (i = 0; i < 3; ++i)
2909 {
2910 rtx sub = XEXP (PATTERN (insn), i);
2911 for (; sub ; sub = NEXT_INSN (sub))
2912 if (can_throw_external (sub))
2913 return true;
2914 }
2915 return false;
2916 }
2917
2918 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2919 if (!note)
2920 {
2921 /* Calls (and trapping insns) without notes are outside any
2922 exception handling region in this function. We have to
2923 assume it might throw. Given that the front end and middle
2924 ends mark known NOTHROW functions, this isn't so wildly
2925 inaccurate. */
2926 return (GET_CODE (insn) == CALL_INSN
2927 || (flag_non_call_exceptions
2928 && may_trap_p (PATTERN (insn))));
2929 }
2930 if (INTVAL (XEXP (note, 0)) <= 0)
2931 return false;
2932
2933 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2934
2935 type_thrown = NULL_TREE;
2936 if (region->type == ERT_THROW)
2937 {
2938 type_thrown = region->u.throw.type;
2939 region = region->outer;
2940 }
2941
2942 /* If the exception is caught or blocked by any containing region,
2943 then it is not seen by any calling function. */
2944 for (; region ; region = region->outer)
2945 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2946 return false;
2947
2948 return true;
2949 }
2950
2951 /* True if nothing in this function can throw outside this function. */
2952
2953 bool
2954 nothrow_function_p ()
2955 {
2956 rtx insn;
2957
2958 if (! flag_exceptions)
2959 return true;
2960
2961 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2962 if (can_throw_external (insn))
2963 return false;
2964 for (insn = current_function_epilogue_delay_list; insn;
2965 insn = XEXP (insn, 1))
2966 if (can_throw_external (insn))
2967 return false;
2968
2969 return true;
2970 }
2971
2972 \f
2973 /* Various hooks for unwind library. */
2974
2975 /* Do any necessary initialization to access arbitrary stack frames.
2976 On the SPARC, this means flushing the register windows. */
2977
2978 void
2979 expand_builtin_unwind_init ()
2980 {
2981 /* Set this so all the registers get saved in our frame; we need to be
2982 able to copy the saved values for any registers from frames we unwind. */
2983 current_function_has_nonlocal_label = 1;
2984
2985 #ifdef SETUP_FRAME_ADDRESSES
2986 SETUP_FRAME_ADDRESSES ();
2987 #endif
2988 }
2989
2990 rtx
2991 expand_builtin_eh_return_data_regno (arglist)
2992 tree arglist;
2993 {
2994 tree which = TREE_VALUE (arglist);
2995 unsigned HOST_WIDE_INT iwhich;
2996
2997 if (TREE_CODE (which) != INTEGER_CST)
2998 {
2999 error ("argument of `__builtin_eh_return_regno' must be constant");
3000 return constm1_rtx;
3001 }
3002
3003 iwhich = tree_low_cst (which, 1);
3004 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3005 if (iwhich == INVALID_REGNUM)
3006 return constm1_rtx;
3007
3008 #ifdef DWARF_FRAME_REGNUM
3009 iwhich = DWARF_FRAME_REGNUM (iwhich);
3010 #else
3011 iwhich = DBX_REGISTER_NUMBER (iwhich);
3012 #endif
3013
3014 return GEN_INT (iwhich);
3015 }
3016
3017 /* Given a value extracted from the return address register or stack slot,
3018 return the actual address encoded in that value. */
3019
3020 rtx
3021 expand_builtin_extract_return_addr (addr_tree)
3022 tree addr_tree;
3023 {
3024 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3025
3026 /* First mask out any unwanted bits. */
3027 #ifdef MASK_RETURN_ADDR
3028 expand_and (addr, MASK_RETURN_ADDR, addr);
3029 #endif
3030
3031 /* Then adjust to find the real return address. */
3032 #if defined (RETURN_ADDR_OFFSET)
3033 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3034 #endif
3035
3036 return addr;
3037 }
3038
3039 /* Given an actual address in addr_tree, do any necessary encoding
3040 and return the value to be stored in the return address register or
3041 stack slot so the epilogue will return to that address. */
3042
3043 rtx
3044 expand_builtin_frob_return_addr (addr_tree)
3045 tree addr_tree;
3046 {
3047 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3048
3049 #ifdef POINTERS_EXTEND_UNSIGNED
3050 if (GET_MODE (addr) != Pmode)
3051 addr = convert_memory_address (Pmode, addr);
3052 #endif
3053
3054 #ifdef RETURN_ADDR_OFFSET
3055 addr = force_reg (Pmode, addr);
3056 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3057 #endif
3058
3059 return addr;
3060 }
3061
3062 /* Set up the epilogue with the magic bits we'll need to return to the
3063 exception handler. */
3064
3065 void
3066 expand_builtin_eh_return (stackadj_tree, handler_tree)
3067 tree stackadj_tree, handler_tree;
3068 {
3069 rtx stackadj, handler;
3070
3071 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3072 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3073
3074 #ifdef POINTERS_EXTEND_UNSIGNED
3075 if (GET_MODE (stackadj) != Pmode)
3076 stackadj = convert_memory_address (Pmode, stackadj);
3077
3078 if (GET_MODE (handler) != Pmode)
3079 handler = convert_memory_address (Pmode, handler);
3080 #endif
3081
3082 if (! cfun->eh->ehr_label)
3083 {
3084 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3085 cfun->eh->ehr_handler = copy_to_reg (handler);
3086 cfun->eh->ehr_label = gen_label_rtx ();
3087 }
3088 else
3089 {
3090 if (stackadj != cfun->eh->ehr_stackadj)
3091 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3092 if (handler != cfun->eh->ehr_handler)
3093 emit_move_insn (cfun->eh->ehr_handler, handler);
3094 }
3095
3096 emit_jump (cfun->eh->ehr_label);
3097 }
3098
3099 void
3100 expand_eh_return ()
3101 {
3102 rtx sa, ra, around_label;
3103
3104 if (! cfun->eh->ehr_label)
3105 return;
3106
3107 sa = EH_RETURN_STACKADJ_RTX;
3108 if (! sa)
3109 {
3110 error ("__builtin_eh_return not supported on this target");
3111 return;
3112 }
3113
3114 current_function_calls_eh_return = 1;
3115
3116 around_label = gen_label_rtx ();
3117 emit_move_insn (sa, const0_rtx);
3118 emit_jump (around_label);
3119
3120 emit_label (cfun->eh->ehr_label);
3121 clobber_return_register ();
3122
3123 #ifdef HAVE_eh_return
3124 if (HAVE_eh_return)
3125 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3126 else
3127 #endif
3128 {
3129 ra = EH_RETURN_HANDLER_RTX;
3130 if (! ra)
3131 {
3132 error ("__builtin_eh_return not supported on this target");
3133 ra = gen_reg_rtx (Pmode);
3134 }
3135
3136 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3137 emit_move_insn (ra, cfun->eh->ehr_handler);
3138 }
3139
3140 emit_label (around_label);
3141 }
3142 \f
3143 /* In the following functions, we represent entries in the action table
3144 as 1-based indices. Special cases are:
3145
3146 0: null action record, non-null landing pad; implies cleanups
3147 -1: null action record, null landing pad; implies no action
3148 -2: no call-site entry; implies must_not_throw
3149 -3: we have yet to process outer regions
3150
3151 Further, no special cases apply to the "next" field of the record.
3152 For next, 0 means end of list. */
3153
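/* As a worked illustration (a sketch, not literal compiler output):
   for a lone try block with handlers catch (A) and catch (B), where
   assign_filter_values gave A filter 1 and B filter 2, the chain is
   built from the last catch backward.  B yields the record
   { filter 2, next 0 } at offset 1, encoded as bytes 02 00; A then
   yields { filter 1, next -3 } at offset 3, encoded as bytes 01 7d,
   the -3 being the self-relative displacement back to B's record.
   The call-site entry for insns within the try block records action
   index 3, so the runtime considers A before B.  */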
3154 struct action_record
3155 {
3156 int offset;
3157 int filter;
3158 int next;
3159 };
3160
3161 static int
3162 action_record_eq (pentry, pdata)
3163 const PTR pentry;
3164 const PTR pdata;
3165 {
3166 const struct action_record *entry = (const struct action_record *) pentry;
3167 const struct action_record *data = (const struct action_record *) pdata;
3168 return entry->filter == data->filter && entry->next == data->next;
3169 }
3170
3171 static hashval_t
3172 action_record_hash (pentry)
3173 const PTR pentry;
3174 {
3175 const struct action_record *entry = (const struct action_record *) pentry;
3176 return entry->next * 1009 + entry->filter;
3177 }
3178
3179 static int
3180 add_action_record (ar_hash, filter, next)
3181 htab_t ar_hash;
3182 int filter, next;
3183 {
3184 struct action_record **slot, *new, tmp;
3185
3186 tmp.filter = filter;
3187 tmp.next = next;
3188 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3189
3190 if ((new = *slot) == NULL)
3191 {
3192 new = (struct action_record *) xmalloc (sizeof (*new));
3193 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3194 new->filter = filter;
3195 new->next = next;
3196 *slot = new;
3197
3198 /* The filter value goes in untouched. The link to the next
3199 record is a "self-relative" byte offset, or zero to indicate
3200 that there is no next record. So convert the absolute 1-based
3201 indices we've been carrying around into a displacement. */
3202
3203 push_sleb128 (&cfun->eh->action_record_data, filter);
3204 if (next)
3205 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3206 push_sleb128 (&cfun->eh->action_record_data, next);
3207 }
3208
3209 return new->offset;
3210 }
3211
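/* Compute the chain of action records for REGION and its containing
   regions, entering new records via add_action_record.  Return the
   1-based offset of the first record of the chain, or one of the
   special 0 / -1 / -2 values described above.  */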
3212 static int
3213 collect_one_action_chain (ar_hash, region)
3214 htab_t ar_hash;
3215 struct eh_region *region;
3216 {
3217 struct eh_region *c;
3218 int next;
3219
3220 /* If we've reached the top of the region chain, then we have
3221 no actions, and require no landing pad. */
3222 if (region == NULL)
3223 return -1;
3224
3225 switch (region->type)
3226 {
3227 case ERT_CLEANUP:
3228 /* A cleanup adds a zero filter to the beginning of the chain, but
3229 there are special cases to look out for. If there are *only*
3230 cleanups along a path, then it compresses to a zero action.
3231 Further, if there are multiple cleanups along a path, we only
3232 need to represent one of them, as that is enough to trigger
3233 entry to the landing pad at runtime. */
3234 next = collect_one_action_chain (ar_hash, region->outer);
3235 if (next <= 0)
3236 return 0;
3237 for (c = region->outer; c ; c = c->outer)
3238 if (c->type == ERT_CLEANUP)
3239 return next;
3240 return add_action_record (ar_hash, 0, next);
3241
3242 case ERT_TRY:
3243 /* Process the associated catch regions in reverse order.
3244 If there's a catch-all handler, then we don't need to
3245 search outer regions. Use a magic -3 value to record
3246 that we haven't done the outer search. */
3247 next = -3;
3248 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3249 {
3250 if (c->u.catch.type_list == NULL)
3251 {
3252 /* Retrieve the filter from the head of the filter list
3253 where we have stored it (see assign_filter_values). */
3254 int filter
3255 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3256
3257 next = add_action_record (ar_hash, filter, 0);
3258 }
3259 else
3260 {
3261 /* Once the outer search is done, trigger an action record for
3262 each filter we have. */
3263 tree flt_node;
3264
3265 if (next == -3)
3266 {
3267 next = collect_one_action_chain (ar_hash, region->outer);
3268
3269 /* If there is no next action, terminate the chain. */
3270 if (next == -1)
3271 next = 0;
3272 /* If all outer actions are cleanups or must_not_throw,
3273 we'll have no action record for it, since we wanted
3274 to encode these states in the call-site record directly.
3275 Add a cleanup action to the chain to catch these. */
3276 else if (next <= 0)
3277 next = add_action_record (ar_hash, 0, 0);
3278 }
3279
3280 flt_node = c->u.catch.filter_list;
3281 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3282 {
3283 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3284 next = add_action_record (ar_hash, filter, next);
3285 }
3286 }
3287 }
3288 return next;
3289
3290 case ERT_ALLOWED_EXCEPTIONS:
3291 /* An exception specification adds its filter to the
3292 beginning of the chain. */
3293 next = collect_one_action_chain (ar_hash, region->outer);
3294 return add_action_record (ar_hash, region->u.allowed.filter,
3295 next < 0 ? 0 : next);
3296
3297 case ERT_MUST_NOT_THROW:
3298 /* A must-not-throw region with no inner handlers or cleanups
3299 requires no call-site entry. Note that this differs from
3300 the no handler or cleanup case in that we do require an lsda
3301 to be generated. Return a magic -2 value to record this. */
3302 return -2;
3303
3304 case ERT_CATCH:
3305 case ERT_THROW:
3306 /* CATCH regions are handled in TRY above. THROW regions are
3307 for optimization information only and produce no output. */
3308 return collect_one_action_chain (ar_hash, region->outer);
3309
3310 default:
3311 abort ();
3312 }
3313 }
3314
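/* Append an entry to the call-site table, doubling the array as
   needed.  Return the entry's index, biased by call_site_base.  */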
3315 static int
3316 add_call_site (landing_pad, action)
3317 rtx landing_pad;
3318 int action;
3319 {
3320 struct call_site_record *data = cfun->eh->call_site_data;
3321 int used = cfun->eh->call_site_data_used;
3322 int size = cfun->eh->call_site_data_size;
3323
3324 if (used >= size)
3325 {
3326 size = (size ? size * 2 : 64);
3327 data = (struct call_site_record *)
3328 xrealloc (data, sizeof (*data) * size);
3329 cfun->eh->call_site_data = data;
3330 cfun->eh->call_site_data_size = size;
3331 }
3332
3333 data[used].landing_pad = landing_pad;
3334 data[used].action = action;
3335
3336 cfun->eh->call_site_data_used = used + 1;
3337
3338 return used + call_site_base;
3339 }
3340
3341 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3342 The new note numbers will not refer to region numbers, but
3343 instead to call site entries. */
3344
3345 void
3346 convert_to_eh_region_ranges ()
3347 {
3348 rtx insn, iter, note;
3349 htab_t ar_hash;
3350 int last_action = -3;
3351 rtx last_action_insn = NULL_RTX;
3352 rtx last_landing_pad = NULL_RTX;
3353 rtx first_no_action_insn = NULL_RTX;
3354 int call_site = 0;
3355
3356 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3357 return;
3358
3359 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3360
3361 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3362
3363 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3364 if (INSN_P (iter))
3365 {
3366 struct eh_region *region;
3367 int this_action;
3368 rtx this_landing_pad;
3369
3370 insn = iter;
3371 if (GET_CODE (insn) == INSN
3372 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3373 insn = XVECEXP (PATTERN (insn), 0, 0);
3374
3375 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3376 if (!note)
3377 {
3378 if (! (GET_CODE (insn) == CALL_INSN
3379 || (flag_non_call_exceptions
3380 && may_trap_p (PATTERN (insn)))))
3381 continue;
3382 this_action = -1;
3383 region = NULL;
3384 }
3385 else
3386 {
3387 if (INTVAL (XEXP (note, 0)) <= 0)
3388 continue;
3389 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3390 this_action = collect_one_action_chain (ar_hash, region);
3391 }
3392
3393 /* The existence of catch handlers or must-not-throw regions
3394 implies that an lsda is needed (even if empty). */
3395 if (this_action != -1)
3396 cfun->uses_eh_lsda = 1;
3397
3398 /* Delay creation of region notes for no-action regions
3399 until we're sure that an lsda will be required. */
3400 else if (last_action == -3)
3401 {
3402 first_no_action_insn = iter;
3403 last_action = -1;
3404 }
3405
3406 /* Cleanups and handlers may share action chains but not
3407 landing pads. Collect the landing pad for this region. */
3408 if (this_action >= 0)
3409 {
3410 struct eh_region *o;
3411 for (o = region; ! o->landing_pad ; o = o->outer)
3412 continue;
3413 this_landing_pad = o->landing_pad;
3414 }
3415 else
3416 this_landing_pad = NULL_RTX;
3417
3418 /* Differing actions or landing pads implies a change in call-site
3419 info, which implies some EH_REGION note should be emitted. */
3420 if (last_action != this_action
3421 || last_landing_pad != this_landing_pad)
3422 {
3423 /* If we'd not seen a previous action (-3) or the previous
3424 action was must-not-throw (-2), then we do not need an
3425 end note. */
3426 if (last_action >= -1)
3427 {
3428 /* If we delayed the creation of the begin note, do it now. */
3429 if (first_no_action_insn)
3430 {
3431 call_site = add_call_site (NULL_RTX, 0);
3432 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3433 first_no_action_insn);
3434 NOTE_EH_HANDLER (note) = call_site;
3435 first_no_action_insn = NULL_RTX;
3436 }
3437
3438 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3439 last_action_insn);
3440 NOTE_EH_HANDLER (note) = call_site;
3441 }
3442
3443 /* If the new action is must-not-throw, then no region notes
3444 are created. */
3445 if (this_action >= -1)
3446 {
3447 call_site = add_call_site (this_landing_pad,
3448 this_action < 0 ? 0 : this_action);
3449 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3450 NOTE_EH_HANDLER (note) = call_site;
3451 }
3452
3453 last_action = this_action;
3454 last_landing_pad = this_landing_pad;
3455 }
3456 last_action_insn = iter;
3457 }
3458
3459 if (last_action >= -1 && ! first_no_action_insn)
3460 {
3461 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3462 NOTE_EH_HANDLER (note) = call_site;
3463 }
3464
3465 htab_delete (ar_hash);
3466 }
3467
3468 \f
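/* Append VALUE to *DATA_AREA as an unsigned LEB128: seven bits per
   byte, least significant group first, high bit set on every byte but
   the last.  For example, 624485 (0x98765) encodes as the three bytes
   0xe5 0x8e 0x26.  */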
3469 static void
3470 push_uleb128 (data_area, value)
3471 varray_type *data_area;
3472 unsigned int value;
3473 {
3474 do
3475 {
3476 unsigned char byte = value & 0x7f;
3477 value >>= 7;
3478 if (value)
3479 byte |= 0x80;
3480 VARRAY_PUSH_UCHAR (*data_area, byte);
3481 }
3482 while (value);
3483 }
3484
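/* Likewise, but signed LEB128, where bit 0x40 of the final byte
   carries the sign.  For example, -624485 encodes as the three bytes
   0x9b 0xf1 0x59.  */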
3485 static void
3486 push_sleb128 (data_area, value)
3487 varray_type *data_area;
3488 int value;
3489 {
3490 unsigned char byte;
3491 int more;
3492
3493 do
3494 {
3495 byte = value & 0x7f;
3496 value >>= 7;
3497 more = ! ((value == 0 && (byte & 0x40) == 0)
3498 || (value == -1 && (byte & 0x40) != 0));
3499 if (more)
3500 byte |= 0x80;
3501 VARRAY_PUSH_UCHAR (*data_area, byte);
3502 }
3503 while (more);
3504 }
3505
3506 \f
3507 #ifndef HAVE_AS_LEB128
3508 static int
3509 dw2_size_of_call_site_table ()
3510 {
3511 int n = cfun->eh->call_site_data_used;
3512 int size = n * (4 + 4 + 4);
3513 int i;
3514
3515 for (i = 0; i < n; ++i)
3516 {
3517 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3518 size += size_of_uleb128 (cs->action);
3519 }
3520
3521 return size;
3522 }
3523
3524 static int
3525 sjlj_size_of_call_site_table ()
3526 {
3527 int n = cfun->eh->call_site_data_used;
3528 int size = 0;
3529 int i;
3530
3531 for (i = 0; i < n; ++i)
3532 {
3533 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3534 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3535 size += size_of_uleb128 (cs->action);
3536 }
3537
3538 return size;
3539 }
3540 #endif
3541
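/* Output the call-site table in the format dwarf2 unwinding expects:
   for each entry, the region start as an offset from the start of the
   function, the region length, the landing pad offset (zero if there
   is none), and the action record offset as a uleb128.  */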
3542 static void
3543 dw2_output_call_site_table ()
3544 {
3545 const char *const function_start_lab
3546 = IDENTIFIER_POINTER (current_function_func_begin_label);
3547 int n = cfun->eh->call_site_data_used;
3548 int i;
3549
3550 for (i = 0; i < n; ++i)
3551 {
3552 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3553 char reg_start_lab[32];
3554 char reg_end_lab[32];
3555 char landing_pad_lab[32];
3556
3557 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3558 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3559
3560 if (cs->landing_pad)
3561 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3562 CODE_LABEL_NUMBER (cs->landing_pad));
3563
3564 /* ??? Perhaps use insn length scaling if the assembler supports
3565 generic arithmetic. */
3566 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3567 data4 if the function is small enough. */
3568 #ifdef HAVE_AS_LEB128
3569 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3570 "region %d start", i);
3571 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3572 "length");
3573 if (cs->landing_pad)
3574 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3575 "landing pad");
3576 else
3577 dw2_asm_output_data_uleb128 (0, "landing pad");
3578 #else
3579 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3580 "region %d start", i);
3581 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3582 if (cs->landing_pad)
3583 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3584 "landing pad");
3585 else
3586 dw2_asm_output_data (4, 0, "landing pad");
3587 #endif
3588 dw2_asm_output_data_uleb128 (cs->action, "action");
3589 }
3590
3591 call_site_base += n;
3592 }
3593
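/* Output the call-site table in the compact form sjlj unwinding uses:
   for each entry, just the uleb128 dispatch value (stashed in the
   landing_pad field) and the uleb128 action record offset.  */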
3594 static void
3595 sjlj_output_call_site_table ()
3596 {
3597 int n = cfun->eh->call_site_data_used;
3598 int i;
3599
3600 for (i = 0; i < n; ++i)
3601 {
3602 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3603
3604 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3605 "region %d landing pad", i);
3606 dw2_asm_output_data_uleb128 (cs->action, "action");
3607 }
3608
3609 call_site_base += n;
3610 }
3611
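/* Output the language-specific data area (LSDA) for the current
   function.  As emitted below, it consists of, in order:

	1 byte		@LPStart format (always DW_EH_PE_omit here)
	1 byte		@TType format
	uleb128		@TType base offset (only with @TType data)
	1 byte		call-site entry format
	uleb128		call-site table length
	...		call-site table (dw2 or sjlj flavor)
	...		action record table
	...		@TType entries, aligned, emitted in reverse
	...		exception specification tables  */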
3612 void
3613 output_function_exception_table ()
3614 {
3615 int tt_format, cs_format, lp_format, i, n;
3616 #ifdef HAVE_AS_LEB128
3617 char ttype_label[32];
3618 char cs_after_size_label[32];
3619 char cs_end_label[32];
3620 #else
3621 int call_site_len;
3622 #endif
3623 int have_tt_data;
3624 int funcdef_number;
3625 int tt_format_size = 0;
3626
3627 /* Not all functions need anything. */
3628 if (! cfun->uses_eh_lsda)
3629 return;
3630
3631 funcdef_number = (USING_SJLJ_EXCEPTIONS
3632 ? sjlj_funcdef_number
3633 : current_funcdef_number);
3634
3635 #ifdef IA64_UNWIND_INFO
3636 fputs ("\t.personality\t", asm_out_file);
3637 output_addr_const (asm_out_file, eh_personality_libfunc);
3638 fputs ("\n\t.handlerdata\n", asm_out_file);
3639 /* Note that varasm still thinks we're in the function's code section.
3640 The ".endp" directive that will immediately follow will take us back. */
3641 #else
3642 (*targetm.asm_out.exception_section) ();
3643 #endif
3644
3645 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3646 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3647
3648 /* Indicate the format of the @TType entries. */
3649 if (! have_tt_data)
3650 tt_format = DW_EH_PE_omit;
3651 else
3652 {
3653 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3654 #ifdef HAVE_AS_LEB128
3655 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3656 #endif
3657 tt_format_size = size_of_encoded_value (tt_format);
3658
3659 assemble_align (tt_format_size * BITS_PER_UNIT);
3660 }
3661
3662 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3663
3664 /* The LSDA header. */
3665
3666 /* Indicate the format of the landing pad start pointer. An omitted
3667 field implies @LPStart == @Start. */
3668 /* Currently we always put @LPStart == @Start. This field would
3669 be most useful in moving the landing pads completely out of
3670 line to another section, but it could also be used to minimize
3671 the size of uleb128 landing pad offsets. */
3672 lp_format = DW_EH_PE_omit;
3673 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3674 eh_data_format_name (lp_format));
3675
3676 /* @LPStart pointer would go here. */
3677
3678 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3679 eh_data_format_name (tt_format));
3680
3681 #ifndef HAVE_AS_LEB128
3682 if (USING_SJLJ_EXCEPTIONS)
3683 call_site_len = sjlj_size_of_call_site_table ();
3684 else
3685 call_site_len = dw2_size_of_call_site_table ();
3686 #endif
3687
3688 /* A pc-relative 4-byte displacement to the @TType data. */
3689 if (have_tt_data)
3690 {
3691 #ifdef HAVE_AS_LEB128
3692 char ttype_after_disp_label[32];
3693 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3694 funcdef_number);
3695 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3696 "@TType base offset");
3697 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3698 #else
3699 /* Ug. Alignment complicates things: the uleb128 size of the displacement depends on its value, which in turn includes padding that depends on that size, so iterate to a fixed point. */
3700 unsigned int before_disp, after_disp, last_disp, disp;
3701
3702 before_disp = 1 + 1;
3703 after_disp = (1 + size_of_uleb128 (call_site_len)
3704 + call_site_len
3705 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3706 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3707 * tt_format_size));
3708
3709 disp = after_disp;
3710 do
3711 {
3712 unsigned int disp_size, pad;
3713
3714 last_disp = disp;
3715 disp_size = size_of_uleb128 (disp);
3716 pad = before_disp + disp_size + after_disp;
3717 if (pad % tt_format_size)
3718 pad = tt_format_size - (pad % tt_format_size);
3719 else
3720 pad = 0;
3721 disp = after_disp + pad;
3722 }
3723 while (disp != last_disp);
3724
3725 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3726 #endif
3727 }
3728
3729 /* Indicate the format of the call-site offsets. */
3730 #ifdef HAVE_AS_LEB128
3731 cs_format = DW_EH_PE_uleb128;
3732 #else
3733 cs_format = DW_EH_PE_udata4;
3734 #endif
3735 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3736 eh_data_format_name (cs_format));
3737
3738 #ifdef HAVE_AS_LEB128
3739 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3740 funcdef_number);
3741 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3742 funcdef_number);
3743 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3744 "Call-site table length");
3745 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3746 if (USING_SJLJ_EXCEPTIONS)
3747 sjlj_output_call_site_table ();
3748 else
3749 dw2_output_call_site_table ();
3750 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3751 #else
3752 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3753 if (USING_SJLJ_EXCEPTIONS)
3754 sjlj_output_call_site_table ();
3755 else
3756 dw2_output_call_site_table ();
3757 #endif
3758
3759 /* ??? Decode and interpret the data for flag_debug_asm. */
3760 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3761 for (i = 0; i < n; ++i)
3762 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3763 (i ? NULL : "Action record table"));
3764
3765 if (have_tt_data)
3766 assemble_align (tt_format_size * BITS_PER_UNIT);
3767
3768 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3769 while (i-- > 0)
3770 {
3771 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3772 rtx value;
3773
3774 if (type == NULL_TREE)
3775 type = integer_zero_node;
3776 else
3777 type = lookup_type_for_runtime (type);
3778
3779 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3780 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3781 assemble_integer (value, tt_format_size,
3782 tt_format_size * BITS_PER_UNIT, 1);
3783 else
3784 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3785 }
3786
3787 #ifdef HAVE_AS_LEB128
3788 if (have_tt_data)
3789 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3790 #endif
3791
3792 /* ??? Decode and interpret the data for flag_debug_asm. */
3793 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3794 for (i = 0; i < n; ++i)
3795 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3796 (i ? NULL : "Exception specification table"));
3797
3798 function_section (current_function_decl);
3799
3800 if (USING_SJLJ_EXCEPTIONS)
3801 sjlj_funcdef_number += 1;
3802 }