/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *eh_catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) eh_try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) eh_catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) eh_throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

typedef struct eh_region *eh_region;
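
/* A small illustration of how these links fit together.  For a C++
   fragment like (hypothetical, for exposition only)

       try { foo (); }
       catch (A &) { ... }
       catch (B &) { ... }

   gen_eh_region_try and gen_eh_region_catch below build one ERT_TRY
   region whose u.eh_try.eh_catch points at the ERT_CATCH region for A
   and whose u.eh_try.last_catch points at the one for B.  The two
   catch regions are chained through u.eh_catch.next_catch/prev_catch,
   and note that they are created as peers of the try region (sharing
   its outer region), not as its children.  */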

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;
  int last_region_number;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
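  /* For orientation, the layout being mirrored looks roughly like this
     (a sketch from memory; unwind-sjlj.c is the authoritative source,
     and the exact types there vary by target):

         struct SjLj_Function_Context
         {
           struct SjLj_Function_Context *prev;
           int call_site;
           _Unwind_Word data[4];
           _Unwind_Personality_Fn personality;
           void *lsda;
           void *jbuf[];   (size and alignment are target-dependent)
         };

     The sjlj_fc_*_ofs values cached at the end of this function are
     the byte offsets of these fields.  */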
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's jmp_buf
         has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
\f
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we would eliminate the terminate
   call the runtime would otherwise do.  Return TRUE if R contains
   throwing statements or some of the exceptions in inner regions can
   be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(N^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions, and because the outer loop walking
   subregions does not dive into MUST_NOT_THROW regions, it examines
   every region at most once.  The inner loop unwinds from the throwing
   statement the same way we do during CFG construction, so the whole
   is O(N^2) in the size of the EH tree, but O(N) in the size of the
   CFG.  In practice EH trees are wide, not deep, so this is not
   a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether we get
             locally caught sooner.  If we get locally caught sooner, we
             either know region R is not reachable, or it would have a
             direct edge from the EH resx and thus we would have
             considered the region reachable in the first place.  */
          if (found)
            {
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function;
   CONTAINS_STMT is a bitmap of all regions that contain a statement
   (or NULL).  */
void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r)
        continue;
      if (r->region_number == i && !TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining a function.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   is reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
        }
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
}

/* Return an array mapping LABEL_DECL_UID to the region whose
   tree_label is that label.  */
795
796 VEC(int,heap) *
797 label_to_region_map (void)
798 {
799 VEC(int,heap) * label_to_region = NULL;
800 int i;
801
802 VEC_safe_grow_cleared (int, heap, label_to_region,
803 cfun->cfg->last_label_uid + 1);
804 for (i = cfun->eh->last_region_number; i > 0; --i)
805 {
806 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
807 if (r && r->region_number == i
808 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
809 {
810 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
811 i);
812 }
813 }
814 return label_to_region;
815 }
816
817 /* Return number of EH regions. */
818 int
819 num_eh_regions (void)
820 {
821 return cfun->eh->last_region_number + 1;
822 }
823
/* Remove all regions whose labels are not reachable from insns.  */

static void
rtl_remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  sbitmap reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = XCNEWVEC (int, get_max_uid ());
  reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
  sbitmap_zero (reachable);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          gcc_assert (!uid_region_num[INSN_UID (r->label)]);
          uid_region_num[INSN_UID (r->label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);

  remove_unreachable_regions (reachable, NULL);

  sbitmap_free (reachable);
  free (uid_region_num);
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  rtl_remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = GGC_NEW (struct ehl_map_entry);
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || crtl->eh.built_landing_pads);

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (crtl->eh.exception_handler_label_map)
    htab_empty (crtl->eh.exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      crtl->eh.exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, we need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the
   MAP callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find the new number from
     the old, which means we must look at the numbers present, instead
     of the count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;
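  /* A worked example of the renumbering (numbers invented for
     illustration): if the copied subtree of IFUN uses region numbers
     3..7, so min_region is 3, and the current function's
     last_region_number is 10, then eh_offset is 10 + 1 - 3 = 8 and
     copied regions 3..7 become 11..15.  */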

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try =
           VEC_index (eh_region, cfun->eh->region_array, outer_region);
         prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
      if (prev_try->type == ERT_MUST_NOT_THROW
          || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
              && !prev_try->u.allowed.type_list))
        {
          prev_try = NULL;
          break;
        }

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH regions that were toplevel in the input function
         are now in the outer EH region of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        case ERT_CLEANUP:
          if (cur->u.cleanup.prev_try)
            REMAP (cur->u.cleanup.prev_try);
          else
            cur->u.cleanup.prev_try = prev_try;
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}

/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
        return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}

/* Return the number of the region that is outer to both REGION_A and
   REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
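
/* A worked example of the two filter spaces above (values invented for
   illustration): catching types A and B, in that order, assigns the
   1-based ttype filters 1 and 2.  An exception specification (A, B)
   then receives a negative filter; if it is the first entry, the
   buffer is empty and the filter is -(0 + 1) = -1, and the bytes
   "1 2 0" (each a uleb128, terminated by 0) are pushed into
   ehspec_data.  */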

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
              {
                if (c->u.eh_catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (crtl->eh.filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (crtl->eh.filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}
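
/* For intuition, the post-landing pad dispatch built above for a try
   region catching types A and B has roughly this shape (pseudo-code;
   the filter numbers come from assign_filter_values):

       post_landing_pad:
         if (filter == filter_of_A) goto label_of_catch_A;
         if (filter == filter_of_B) goto label_of_catch_B;
         RESX;   -- placeholder, resolved by connect_post_landing_pads

   connect_post_landing_pads below then rewrites the RESX marker into
   either a jump to the next outer post-landing pad or a call to
   _Unwind_Resume.  */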

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}

\f
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
        emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
        { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
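
/* Each dwarf2 landing pad emitted above thus looks roughly like
   (pseudo-code):

       landing_pad:
         (optional exception_receiver / nonlocal_goto_receiver insn)
         exc_ptr = reg EH_RETURN_DATA_REGNO (0);
         filter  = reg EH_RETURN_DATA_REGNO (1);
         fall through to the post-landing pad

   i.e. it only moves the runtime's two return-data registers into the
   pseudos used by the dispatch code.  */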

\f
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.eh_throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL, false);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}

1887 static void
1888 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1889 {
1890 htab_t ar_hash;
1891 int i, index;
1892
1893 /* First task: build the action table. */
1894
1895 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1896 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1897
1898 for (i = cfun->eh->last_region_number; i > 0; --i)
1899 if (lp_info[i].directly_reachable)
1900 {
1901 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1902
1903 r->landing_pad = dispatch_label;
1904 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1905 if (lp_info[i].action_index != -1)
1906 crtl->uses_eh_lsda = 1;
1907 }
1908
1909 htab_delete (ar_hash);
1910
1911 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1912 landing pad label for the region. For sjlj though, there is one
1913 common landing pad from which we dispatch to the post-landing pads.
1914
1915 A region receives a dispatch index if it is directly reachable
1916 and requires in-function processing. Regions that share post-landing
1917 pads may share dispatch indices. */
1918 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1919 (see build_post_landing_pads) so we don't bother checking for it. */
1920
1921 index = 0;
1922 for (i = cfun->eh->last_region_number; i > 0; --i)
1923 if (lp_info[i].directly_reachable)
1924 lp_info[i].dispatch_index = index++;
1925
1926 /* Finally: assign call-site values. In dwarf2 terms, this would be
1927 the region number assigned by convert_to_eh_region_ranges, but
1928 handles no-action and must-not-throw differently. */
1929
1930 call_site_base = 1;
1931 for (i = cfun->eh->last_region_number; i > 0; --i)
1932 if (lp_info[i].directly_reachable)
1933 {
1934 int action = lp_info[i].action_index;
1935
1936 /* Map must-not-throw to otherwise unused call-site index 0. */
1937 if (action == -2)
1938 index = 0;
1939 /* Map no-action to otherwise unused call-site index -1. */
1940 else if (action == -1)
1941 index = -1;
1942 /* Otherwise, look it up in the table. */
1943 else
1944 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1945
1946 lp_info[i].call_site_index = index;
1947 }
1948 }
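
/* Illustration (not part of the compiler proper): the mapping the
   loop above implements, in tabular form.

       action_index   meaning              call_site_index
           -2         must-not-throw       0  (reserved)
           -1         no action            -1 (reserved)
         >= 0         real action chain    from add_call_site  */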
1949
1950 static void
1951 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1952 {
1953 int last_call_site = -2;
1954 rtx insn, mem;
1955
1956 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1957 {
1958 struct eh_region *region;
1959 int this_call_site;
1960 rtx note, before, p;
1961
1962 /* Reset value tracking at extended basic block boundaries. */
1963 if (LABEL_P (insn))
1964 last_call_site = -2;
1965
1966 if (! INSN_P (insn))
1967 continue;
1968
1969 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1970 if (!note)
1971 {
1972 /* Calls (and trapping insns) without notes are outside any
1973 exception handling region in this function. Mark them as
1974 no action. */
1975 if (CALL_P (insn)
1976 || (flag_non_call_exceptions
1977 && may_trap_p (PATTERN (insn))))
1978 this_call_site = -1;
1979 else
1980 continue;
1981 }
1982 else
1983 {
1984 /* Calls that are known to not throw need not be marked. */
1985 if (INTVAL (XEXP (note, 0)) <= 0)
1986 continue;
1987
1988 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1989 this_call_site = lp_info[region->region_number].call_site_index;
1990 }
1991
1992 if (this_call_site == last_call_site)
1993 continue;
1994
1995 /* Don't separate a call from its argument loads. */
1996 before = insn;
1997 if (CALL_P (insn))
1998 before = find_first_parameter_load (insn, NULL_RTX);
1999
2000 start_sequence ();
2001 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2002 sjlj_fc_call_site_ofs);
2003 emit_move_insn (mem, GEN_INT (this_call_site));
2004 p = get_insns ();
2005 end_sequence ();
2006
2007 emit_insn_before (p, before);
2008 last_call_site = this_call_site;
2009 }
2010 }
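
/* Conceptually, the loop above arranges for every potentially-throwing
   insn to be preceded by a store of its call-site index into the
   function context; as a sketch (not literal emitted RTL):

       fc.call_site = this_call_site;     word at sjlj_fc_call_site_ofs
       result = foo (...);                the call itself

   If foo throws, the sjlj personality routine reads the index back out
   of the context, and the code emitted by sjlj_emit_dispatch_table
   branches to the matching post-landing pad.  */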
2011
2012 /* Construct the SjLj_Function_Context. */
2013
2014 static void
2015 sjlj_emit_function_enter (rtx dispatch_label)
2016 {
2017 rtx fn_begin, fc, mem, seq;
2018 bool fn_begin_outside_block;
2019
2020 fc = crtl->eh.sjlj_fc;
2021
2022 start_sequence ();
2023
2024 /* We're storing this libcall's address into memory instead of
2025 calling it directly. Thus, we must call assemble_external_libcall
2026 here, as we cannot depend on emit_library_call to do it for us. */
2027 assemble_external_libcall (eh_personality_libfunc);
2028 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2029 emit_move_insn (mem, eh_personality_libfunc);
2030
2031 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2032 if (crtl->uses_eh_lsda)
2033 {
2034 char buf[20];
2035 rtx sym;
2036
2037 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2038 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2039 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2040 emit_move_insn (mem, sym);
2041 }
2042 else
2043 emit_move_insn (mem, const0_rtx);
2044
2045 #ifdef DONT_USE_BUILTIN_SETJMP
2046 {
2047 rtx x;
2048 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2049 TYPE_MODE (integer_type_node), 1,
2050 plus_constant (XEXP (fc, 0),
2051 sjlj_fc_jbuf_ofs), Pmode);
2052
2053 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2054 TYPE_MODE (integer_type_node), 0, dispatch_label);
2055 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2056 }
2057 #else
2058 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2059 dispatch_label);
2060 #endif
2061
2062 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2063 1, XEXP (fc, 0), Pmode);
2064
2065 seq = get_insns ();
2066 end_sequence ();
2067
2068 /* ??? Instead of doing this at the beginning of the function,
2069 do this in a block that is at loop level 0 and dominates all
2070 can_throw_internal instructions. */
2071
2072 fn_begin_outside_block = true;
2073 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2074 if (NOTE_P (fn_begin))
2075 {
2076 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2077 break;
2078 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2079 fn_begin_outside_block = false;
2080 }
2081
2082 if (fn_begin_outside_block)
2083 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2084 else
2085 emit_insn_after (seq, fn_begin);
2086 }
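
/* In C terms, the prologue sequence constructed above behaves roughly
   like this sketch (field names are illustrative; the real layout is
   given by sjlj_fc_type_node and the sjlj_fc_*_ofs offsets):

       fc.personality = personality routine, e.g. __gxx_personality_sj0;
       fc.lsda = &LLSDAnnn;                   or 0 if no LSDA is used
       if (setjmp (fc.jbuf))                  returns nonzero on unwind
         goto dispatch_label;
       _Unwind_SjLj_Register (&fc);  */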
2087
2088 /* Call back from expand_function_end to know where we should put
2089 the call to unwind_sjlj_unregister_libfunc if needed. */
2090
2091 void
2092 sjlj_emit_function_exit_after (rtx after)
2093 {
2094 crtl->eh.sjlj_exit_after = after;
2095 }
2096
2097 static void
2098 sjlj_emit_function_exit (void)
2099 {
2100 rtx seq;
2101 edge e;
2102 edge_iterator ei;
2103
2104 start_sequence ();
2105
2106 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2107 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2108
2109 seq = get_insns ();
2110 end_sequence ();
2111
2112 /* ??? Really this can be done in any block at loop level 0 that
2113 post-dominates all can_throw_internal instructions. This is
2114 the last possible moment. */
2115
2116 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2117 if (e->flags & EDGE_FALLTHRU)
2118 break;
2119 if (e)
2120 {
2121 rtx insn;
2122
2123 /* Figure out whether the place where we are supposed to insert the
2124 libcall is inside the last basic block or after it. In the latter
2125 case we need to emit the insns on the edge. */
2126 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
2127 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2128 {
2129 if (insn == crtl->eh.sjlj_exit_after)
2130 {
2131 if (LABEL_P (insn))
2132 insn = NEXT_INSN (insn);
2133 emit_insn_after (seq, insn);
2134 return;
2135 }
2136 if (insn == BB_END (e->src))
2137 break;
2138 }
2139 insert_insn_on_edge (seq, e);
2140 }
2141 }
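
/* The sequence emitted above is just the C equivalent of

       _Unwind_SjLj_Unregister (&fc);

   placed on the path to the function's return.  */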
2142
2143 static void
2144 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2145 {
2146 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2147 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2148 int i, first_reachable;
2149 rtx mem, dispatch, seq, fc;
2150 rtx before;
2151 basic_block bb;
2152 edge e;
2153
2154 fc = crtl->eh.sjlj_fc;
2155
2156 start_sequence ();
2157
2158 emit_label (dispatch_label);
2159
2160 #ifndef DONT_USE_BUILTIN_SETJMP
2161 expand_builtin_setjmp_receiver (dispatch_label);
2162 #endif
2163
2164 /* Load up dispatch index, exc_ptr and filter values from the
2165 function context. */
2166 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2167 sjlj_fc_call_site_ofs);
2168 dispatch = copy_to_reg (mem);
2169
2170 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2171 if (unwind_word_mode != ptr_mode)
2172 {
2173 #ifdef POINTERS_EXTEND_UNSIGNED
2174 mem = convert_memory_address (ptr_mode, mem);
2175 #else
2176 mem = convert_to_mode (ptr_mode, mem, 0);
2177 #endif
2178 }
2179 emit_move_insn (crtl->eh.exc_ptr, mem);
2180
2181 mem = adjust_address (fc, unwind_word_mode,
2182 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2183 if (unwind_word_mode != filter_mode)
2184 mem = convert_to_mode (filter_mode, mem, 0);
2185 emit_move_insn (crtl->eh.filter, mem);
2186
2187 /* Jump to one of the directly reachable regions. */
2188 /* ??? This really ought to be using a switch statement. */
2189
2190 first_reachable = 0;
2191 for (i = cfun->eh->last_region_number; i > 0; --i)
2192 {
2193 if (! lp_info[i].directly_reachable)
2194 continue;
2195
2196 if (! first_reachable)
2197 {
2198 first_reachable = i;
2199 continue;
2200 }
2201
2202 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2203 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2204 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2205 ->post_landing_pad);
2206 }
2207
2208 seq = get_insns ();
2209 end_sequence ();
2210
2211 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2212 ->post_landing_pad);
2213
2214 bb = emit_to_new_bb_before (seq, before);
2215 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2216 e->count = bb->count;
2217 e->probability = REG_BR_PROB_BASE;
2218 }
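
/* The dispatch code generated above corresponds to this C sketch (as
   the ??? note says, a switch statement would be the natural form):

       dispatch_label:
         dispatch = fc.call_site;
         exc_ptr  = (void *) fc.data[0];
         filter   = (int) fc.data[1];
         if (dispatch == dispatch_index of region i)     one test per
           goto post_landing_pad of region i;            reachable region
         ...
         goto post_landing_pad of first_reachable;       fall-through  */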
2219
2220 static void
2221 sjlj_build_landing_pads (void)
2222 {
2223 struct sjlj_lp_info *lp_info;
2224
2225 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2226
2227 if (sjlj_find_directly_reachable_regions (lp_info))
2228 {
2229 rtx dispatch_label = gen_label_rtx ();
2230 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2231 TYPE_MODE (sjlj_fc_type_node),
2232 TYPE_ALIGN (sjlj_fc_type_node));
2233 crtl->eh.sjlj_fc
2234 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2235 int_size_in_bytes (sjlj_fc_type_node),
2236 align);
2237
2238 sjlj_assign_call_site_values (dispatch_label, lp_info);
2239 sjlj_mark_call_sites (lp_info);
2240
2241 sjlj_emit_function_enter (dispatch_label);
2242 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2243 sjlj_emit_function_exit ();
2244 }
2245
2246 free (lp_info);
2247 }
2248
2249 void
2250 finish_eh_generation (void)
2251 {
2252 basic_block bb;
2253
2254 /* Nothing to do if no regions created. */
2255 if (cfun->eh->region_tree == NULL)
2256 return;
2257
2258 /* The object here is to provide find_basic_blocks with detailed
2259 information (via reachable_handlers) on how exception control
2260 flows within the function. In this first pass, we can include
2261 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2262 regions, and hope that it will be useful in deleting unreachable
2263 handlers. Subsequently, we will generate landing pads which will
2264 connect many of the handlers, and then type information will not
2265 be effective. Still, this is a win over previous implementations. */
2266
2267 /* These registers are used by the landing pads. Make sure they
2268 have been generated. */
2269 get_exception_pointer ();
2270 get_exception_filter ();
2271
2272 /* Construct the landing pads. */
2273
2274 assign_filter_values ();
2275 build_post_landing_pads ();
2276 connect_post_landing_pads ();
2277 if (USING_SJLJ_EXCEPTIONS)
2278 sjlj_build_landing_pads ();
2279 else
2280 dw2_build_landing_pads ();
2281
2282 crtl->eh.built_landing_pads = 1;
2283
2284 /* We've totally changed the CFG. Start over. */
2285 find_exception_handler_labels ();
2286 break_superblocks ();
2287 if (USING_SJLJ_EXCEPTIONS
2288 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2289 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2290 commit_edge_insertions ();
2291 FOR_EACH_BB (bb)
2292 {
2293 edge e;
2294 edge_iterator ei;
2295 bool eh = false;
2296 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2297 {
2298 if (e->flags & EDGE_EH)
2299 {
2300 remove_edge (e);
2301 eh = true;
2302 }
2303 else
2304 ei_next (&ei);
2305 }
2306 if (eh)
2307 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2308 }
2309 }
2310 \f
2311 static hashval_t
2312 ehl_hash (const void *pentry)
2313 {
2314 const struct ehl_map_entry *const entry
2315 = (const struct ehl_map_entry *) pentry;
2316
2317 /* 2^32 * ((sqrt(5) - 1) / 2) */
2318 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2319 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2320 }
2321
2322 static int
2323 ehl_eq (const void *pentry, const void *pdata)
2324 {
2325 const struct ehl_map_entry *const entry
2326 = (const struct ehl_map_entry *) pentry;
2327 const struct ehl_map_entry *const data
2328 = (const struct ehl_map_entry *) pdata;
2329
2330 return entry->label == data->label;
2331 }
2332
2333 /* This section handles removing dead code for flow. */
2334
2335 /* Remove LABEL from exception_handler_label_map. */
2336
2337 static void
2338 remove_exception_handler_label (rtx label)
2339 {
2340 struct ehl_map_entry **slot, tmp;
2341
2342 /* If exception_handler_label_map was not built yet,
2343 there is nothing to do. */
2344 if (crtl->eh.exception_handler_label_map == NULL)
2345 return;
2346
2347 tmp.label = label;
2348 slot = (struct ehl_map_entry **)
2349 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2350 gcc_assert (slot);
2351
2352 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2353 }
2354
2355 /* Splice REGION from the region tree etc. */
2356
2357 static void
2358 remove_eh_handler (struct eh_region *region)
2359 {
2360 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2361 rtx lab;
2362
2363 /* For the benefit of efficiently handling REG_EH_REGION notes,
2364 replace this region in the region array with its containing
2365 region. Note that previous region deletions may result in
2366 multiple copies of this region in the array, so we have a
2367 list of alternate numbers by which we are known. */
2368
2369 outer = region->outer;
2370 VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
2371 if (region->aka)
2372 {
2373 unsigned i;
2374 bitmap_iterator bi;
2375
2376 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2377 {
2378 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
2379 }
2380 }
2381
2382 if (outer)
2383 {
2384 if (!outer->aka)
2385 outer->aka = BITMAP_GGC_ALLOC ();
2386 if (region->aka)
2387 bitmap_ior_into (outer->aka, region->aka);
2388 bitmap_set_bit (outer->aka, region->region_number);
2389 }
2390
2391 if (crtl->eh.built_landing_pads)
2392 lab = region->landing_pad;
2393 else
2394 lab = region->label;
2395 if (lab)
2396 remove_exception_handler_label (lab);
2397
2398 if (outer)
2399 pp_start = &outer->inner;
2400 else
2401 pp_start = &cfun->eh->region_tree;
2402 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2403 continue;
2404 *pp = region->next_peer;
2405
2406 inner = region->inner;
2407 if (inner)
2408 {
2409 for (p = inner; p->next_peer ; p = p->next_peer)
2410 p->outer = outer;
2411 p->outer = outer;
2412
2413 p->next_peer = *pp_start;
2414 *pp_start = inner;
2415 }
2416
2417 if (region->type == ERT_CATCH)
2418 {
2419 struct eh_region *eh_try, *next, *prev;
2420
2421 for (eh_try = region->next_peer;
2422 eh_try->type == ERT_CATCH;
2423 eh_try = eh_try->next_peer)
2424 continue;
2425 gcc_assert (eh_try->type == ERT_TRY);
2426
2427 next = region->u.eh_catch.next_catch;
2428 prev = region->u.eh_catch.prev_catch;
2429
2430 if (next)
2431 next->u.eh_catch.prev_catch = prev;
2432 else
2433 eh_try->u.eh_try.last_catch = prev;
2434 if (prev)
2435 prev->u.eh_catch.next_catch = next;
2436 else
2437 {
2438 eh_try->u.eh_try.eh_catch = next;
2439 if (! next)
2440 remove_eh_handler (eh_try);
2441 }
2442 }
2443 }
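
/* Illustration of the splice performed above: removing region R from

       outer
         +- R
         |   +- inner1 - inner2
         +- peer

   promotes R's children into outer's list of children:

       outer
         +- inner1 - inner2 - peer  */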
2444
2445 /* LABEL heads a basic block that is about to be deleted. If this
2446 label corresponds to an exception region, we may be able to
2447 delete the region. */
2448
2449 void
2450 maybe_remove_eh_handler (rtx label)
2451 {
2452 struct ehl_map_entry **slot, tmp;
2453 struct eh_region *region;
2454
2455 /* ??? After generating landing pads, it's not so simple to determine
2456 if the region data is completely unused. One must examine the
2457 landing pad and the post landing pad, and whether an inner try block
2458 is referencing the catch handlers directly. */
2459 if (crtl->eh.built_landing_pads)
2460 return;
2461
2462 tmp.label = label;
2463 slot = (struct ehl_map_entry **)
2464 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2465 if (! slot)
2466 return;
2467 region = (*slot)->region;
2468 if (! region)
2469 return;
2470
2471 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2472 because there is no path to the fallback call to terminate.
2473 But the region continues to affect call-site data until there
2474 are no more contained calls, which we don't see here. */
2475 if (region->type == ERT_MUST_NOT_THROW)
2476 {
2477 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2478 region->label = NULL_RTX;
2479 }
2480 else
2481 remove_eh_handler (region);
2482 }
2483
2484 /* Remove EH region R that has turned out to have no code in its handler. */
2485
2486 void
2487 remove_eh_region (int r)
2488 {
2489 struct eh_region *region;
2490
2491 region = VEC_index (eh_region, cfun->eh->region_array, r);
2492 remove_eh_handler (region);
2493 }
2494
2495 /* Invokes CALLBACK for every exception handler label. Only used by old
2496 loop hackery; should not be used by new code. */
2497
2498 void
2499 for_each_eh_label (void (*callback) (rtx))
2500 {
2501 htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
2502 (void *) &callback);
2503 }
2504
2505 static int
2506 for_each_eh_label_1 (void **pentry, void *data)
2507 {
2508 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2509 void (*callback) (rtx) = *(void (**) (rtx)) data;
2510
2511 (*callback) (entry->label);
2512 return 1;
2513 }
2514
2515 /* Invoke CALLBACK for every exception region in the current function. */
2516
2517 void
2518 for_each_eh_region (void (*callback) (struct eh_region *))
2519 {
2520 int i, n = cfun->eh->last_region_number;
2521 for (i = 1; i <= n; ++i)
2522 {
2523 struct eh_region *region;
2524
2525 region = VEC_index (eh_region, cfun->eh->region_array, i);
2526 if (region)
2527 (*callback) (region);
2528 }
2529 }
2530 \f
2531 /* This section describes CFG exception edges for flow. */
2532
2533 /* For communicating between calls to reachable_next_level. */
2534 struct reachable_info
2535 {
2536 tree types_caught;
2537 tree types_allowed;
2538 void (*callback) (struct eh_region *, void *);
2539 void *callback_data;
2540 };
2541
2542 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2543 base class of TYPE, is in HANDLED. */
2544
2545 static int
2546 check_handled (tree handled, tree type)
2547 {
2548 tree t;
2549
2550 /* We can check for exact matches without front-end help. */
2551 if (! lang_eh_type_covers)
2552 {
2553 for (t = handled; t ; t = TREE_CHAIN (t))
2554 if (TREE_VALUE (t) == type)
2555 return 1;
2556 }
2557 else
2558 {
2559 for (t = handled; t ; t = TREE_CHAIN (t))
2560 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2561 return 1;
2562 }
2563
2564 return 0;
2565 }
2566
2567 /* A subroutine of reachable_next_level. If we are collecting a list
2568 of handlers, add one. After landing pad generation, reference the
2569 landing pad instead of the handlers themselves. Further, the handlers
2570 are all wired together, so by referencing one, we've got them all.
2571 Before landing pad generation we reference each handler individually.
2572
2573 LP_REGION contains the landing pad; REGION is the handler. */
2574
2575 static void
2576 add_reachable_handler (struct reachable_info *info,
2577 struct eh_region *lp_region, struct eh_region *region)
2578 {
2579 if (! info)
2580 return;
2581
2582 if (crtl->eh.built_landing_pads)
2583 info->callback (lp_region, info->callback_data);
2584 else
2585 info->callback (region, info->callback_data);
2586 }
2587
2588 /* Process one level of exception regions for reachability.
2589 If TYPE_THROWN is non-null, then it is the *exact* type being
2590 propagated. If INFO is non-null, then collect handler labels
2591 and caught/allowed type information between invocations. */
2592
2593 static enum reachable_code
2594 reachable_next_level (struct eh_region *region, tree type_thrown,
2595 struct reachable_info *info,
2596 bool maybe_resx)
2597 {
2598 switch (region->type)
2599 {
2600 case ERT_CLEANUP:
2601 /* Before landing-pad generation, we model control flow
2602 directly to the individual handlers. In this way we can
2603 see that catch handler types may shadow one another. */
2604 add_reachable_handler (info, region, region);
2605 return RNL_MAYBE_CAUGHT;
2606
2607 case ERT_TRY:
2608 {
2609 struct eh_region *c;
2610 enum reachable_code ret = RNL_NOT_CAUGHT;
2611
2612 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2613 {
2614 /* A catch-all handler ends the search. */
2615 if (c->u.eh_catch.type_list == NULL)
2616 {
2617 add_reachable_handler (info, region, c);
2618 return RNL_CAUGHT;
2619 }
2620
2621 if (type_thrown)
2622 {
2623 /* If we have at least one type match, end the search. */
2624 tree tp_node = c->u.eh_catch.type_list;
2625
2626 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2627 {
2628 tree type = TREE_VALUE (tp_node);
2629
2630 if (type == type_thrown
2631 || (lang_eh_type_covers
2632 && (*lang_eh_type_covers) (type, type_thrown)))
2633 {
2634 add_reachable_handler (info, region, c);
2635 return RNL_CAUGHT;
2636 }
2637 }
2638
2639 /* If we have definitive information of a match failure,
2640 the catch won't trigger. */
2641 if (lang_eh_type_covers)
2642 return RNL_NOT_CAUGHT;
2643 }
2644
2645 /* At this point, we either don't know what type is thrown or
2646 don't have front-end assistance to help deciding if it is
2647 covered by one of the types in the list for this region.
2648
2649 We'd then like to add this region to the list of reachable
2650 handlers since it is indeed potentially reachable based on the
2651 information we have.
2652
2653 Actually, this handler is for sure not reachable if all the
2654 types it matches have already been caught. That is, it is only
2655 potentially reachable if at least one of the types it catches
2656 has not been previously caught. */
2657
2658 if (! info)
2659 ret = RNL_MAYBE_CAUGHT;
2660 else
2661 {
2662 tree tp_node = c->u.eh_catch.type_list;
2663 bool maybe_reachable = false;
2664
2665 /* Compute the potential reachability of this handler and
2666 update the list of types caught at the same time. */
2667 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2668 {
2669 tree type = TREE_VALUE (tp_node);
2670
2671 if (! check_handled (info->types_caught, type))
2672 {
2673 info->types_caught
2674 = tree_cons (NULL, type, info->types_caught);
2675
2676 maybe_reachable = true;
2677 }
2678 }
2679
2680 if (maybe_reachable)
2681 {
2682 add_reachable_handler (info, region, c);
2683
2684 /* ??? If the catch type is a base class of every allowed
2685 type, then we know we can stop the search. */
2686 ret = RNL_MAYBE_CAUGHT;
2687 }
2688 }
2689 }
2690
2691 return ret;
2692 }
2693
2694 case ERT_ALLOWED_EXCEPTIONS:
2695 /* An empty list of types definitely ends the search. */
2696 if (region->u.allowed.type_list == NULL_TREE)
2697 {
2698 add_reachable_handler (info, region, region);
2699 return RNL_CAUGHT;
2700 }
2701
2702 /* Collect a list of lists of allowed types for use in detecting
2703 when a catch may be transformed into a catch-all. */
2704 if (info)
2705 info->types_allowed = tree_cons (NULL_TREE,
2706 region->u.allowed.type_list,
2707 info->types_allowed);
2708
2709 /* If we have definitive information about the type hierarchy,
2710 then we can tell if the thrown type will pass through the
2711 filter. */
2712 if (type_thrown && lang_eh_type_covers)
2713 {
2714 if (check_handled (region->u.allowed.type_list, type_thrown))
2715 return RNL_NOT_CAUGHT;
2716 else
2717 {
2718 add_reachable_handler (info, region, region);
2719 return RNL_CAUGHT;
2720 }
2721 }
2722
2723 add_reachable_handler (info, region, region);
2724 return RNL_MAYBE_CAUGHT;
2725
2726 case ERT_CATCH:
2727 /* Catch regions are handled by their controlling try region. */
2728 return RNL_NOT_CAUGHT;
2729
2730 case ERT_MUST_NOT_THROW:
2731 /* Here we end our search, since no exceptions may propagate.
2732
2733 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
2734 only via locally handled RESX instructions.
2735
2736 When we inline a function call, we can bring in new handlers. To
2737 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
2738 assume that such handlers exist for any inlinable call until
2739 inlining decisions are fixed. */
2740
2741 if (maybe_resx)
2742 {
2743 add_reachable_handler (info, region, region);
2744 return RNL_CAUGHT;
2745 }
2746 else
2747 return RNL_BLOCKED;
2748
2749 case ERT_THROW:
2750 case ERT_UNKNOWN:
2751 /* Shouldn't see these here. */
2752 gcc_unreachable ();
2753 break;
2754 default:
2755 gcc_unreachable ();
2756 }
2757 }
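
/* For reference, the reachable_code values returned above mean:

       RNL_NOT_CAUGHT    this region cannot handle the throw; the
                         caller should keep walking outward
       RNL_MAYBE_CAUGHT  this region might handle it; it has been
                         recorded, keep walking
       RNL_CAUGHT        this region definitely handles it; stop
       RNL_BLOCKED       nothing propagates past here
                         (must-not-throw)  */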
2758
2759 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2760
2761 void
2762 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2763 void (*callback) (struct eh_region *, void *),
2764 void *callback_data)
2765 {
2766 struct reachable_info info;
2767 struct eh_region *region;
2768 tree type_thrown;
2769
2770 memset (&info, 0, sizeof (info));
2771 info.callback = callback;
2772 info.callback_data = callback_data;
2773
2774 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2775 if (!region)
2776 return;
2777
2778 type_thrown = NULL_TREE;
2779 if (is_resx)
2780 {
2781 /* A RESX leaves a region instead of entering it. Thus the
2782 region itself may have been deleted out from under us. */
2783 if (region == NULL)
2784 return;
2785 region = region->outer;
2786 }
2787 else if (region->type == ERT_THROW)
2788 {
2789 type_thrown = region->u.eh_throw.type;
2790 region = region->outer;
2791 }
2792
2793 while (region)
2794 {
2795 if (reachable_next_level (region, type_thrown, &info,
2796 inlinable_call || is_resx) >= RNL_CAUGHT)
2797 break;
2798 /* If we have processed one cleanup, there is no point in
2799 processing any more of them. Each cleanup will have an edge
2800 to the next outer cleanup region, so the flow graph will be
2801 accurate. */
2802 if (region->type == ERT_CLEANUP)
2803 region = region->u.cleanup.prev_try;
2804 else
2805 region = region->outer;
2806 }
2807 }
2808
2809 /* Retrieve a list of labels of exception handlers which can be
2810 reached by a given insn. */
2811
2812 static void
2813 arh_to_landing_pad (struct eh_region *region, void *data)
2814 {
2815 rtx *p_handlers = (rtx *) data;
2816 if (! *p_handlers)
2817 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2818 }
2819
2820 static void
2821 arh_to_label (struct eh_region *region, void *data)
2822 {
2823 rtx *p_handlers = (rtx *) data;
2824 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2825 }
2826
2827 rtx
2828 reachable_handlers (rtx insn)
2829 {
2830 bool is_resx = false;
2831 rtx handlers = NULL;
2832 int region_number;
2833
2834 if (JUMP_P (insn)
2835 && GET_CODE (PATTERN (insn)) == RESX)
2836 {
2837 region_number = XINT (PATTERN (insn), 0);
2838 is_resx = true;
2839 }
2840 else
2841 {
2842 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2843 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2844 return NULL;
2845 region_number = INTVAL (XEXP (note, 0));
2846 }
2847
2848 foreach_reachable_handler (region_number, is_resx, false,
2849 (crtl->eh.built_landing_pads
2850 ? arh_to_landing_pad
2851 : arh_to_label),
2852 &handlers);
2853
2854 return handlers;
2855 }
2856
2857 /* Determine if the given INSN can throw an exception that is caught
2858 within the function. */
2859
2860 bool
2861 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2862 {
2863 struct eh_region *region;
2864 tree type_thrown;
2865
2866 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2867 if (!region)
2868 return false;
2869
2870 type_thrown = NULL_TREE;
2871 if (is_resx)
2872 region = region->outer;
2873 else if (region->type == ERT_THROW)
2874 {
2875 type_thrown = region->u.eh_throw.type;
2876 region = region->outer;
2877 }
2878
2879 /* If this exception is ignored by each and every containing region,
2880 then control passes straight out. The runtime may handle some
2881 regions, which also do not require processing internally. */
2882 for (; region; region = region->outer)
2883 {
2884 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2885 inlinable_call || is_resx);
2886 if (how == RNL_BLOCKED)
2887 return false;
2888 if (how != RNL_NOT_CAUGHT)
2889 return true;
2890 }
2891
2892 return false;
2893 }
2894
2895 bool
2896 can_throw_internal (const_rtx insn)
2897 {
2898 rtx note;
2899
2900 if (! INSN_P (insn))
2901 return false;
2902
2903 if (JUMP_P (insn)
2904 && GET_CODE (PATTERN (insn)) == RESX
2905 && XINT (PATTERN (insn), 0) > 0)
2906 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2907
2908 if (NONJUMP_INSN_P (insn)
2909 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2910 insn = XVECEXP (PATTERN (insn), 0, 0);
2911
2912 /* Every insn that might throw has an EH_REGION note. */
2913 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2914 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2915 return false;
2916
2917 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2918 }
2919
2920 /* Determine if the given INSN can throw an exception that is
2921 visible outside the function. */
2922
2923 bool
2924 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2925 {
2926 struct eh_region *region;
2927 tree type_thrown;
2928
2929 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2930 if (!region)
2931 return true;
2932
2933 type_thrown = NULL_TREE;
2934 if (is_resx)
2935 region = region->outer;
2936 else if (region->type == ERT_THROW)
2937 {
2938 type_thrown = region->u.eh_throw.type;
2939 region = region->outer;
2940 }
2941
2942 /* If the exception is caught or blocked by any containing region,
2943 then it is not seen by any calling function. */
2944 for (; region ; region = region->outer)
2945 if (reachable_next_level (region, type_thrown, NULL,
2946 inlinable_call || is_resx) >= RNL_CAUGHT)
2947 return false;
2948
2949 return true;
2950 }
2951
2952 bool
2953 can_throw_external (const_rtx insn)
2954 {
2955 rtx note;
2956
2957 if (! INSN_P (insn))
2958 return false;
2959
2960 if (JUMP_P (insn)
2961 && GET_CODE (PATTERN (insn)) == RESX
2962 && XINT (PATTERN (insn), 0) > 0)
2963 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2964
2965 if (NONJUMP_INSN_P (insn)
2966 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2967 insn = XVECEXP (PATTERN (insn), 0, 0);
2968
2969 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2970 if (!note)
2971 {
2972 /* Calls (and trapping insns) without notes are outside any
2973 exception handling region in this function. We have to
2974 assume it might throw. Given that the front end and middle
2975 ends mark known NOTHROW functions, this isn't so wildly
2976 inaccurate. */
2977 return (CALL_P (insn)
2978 || (flag_non_call_exceptions
2979 && may_trap_p (PATTERN (insn))));
2980 }
2981 if (INTVAL (XEXP (note, 0)) <= 0)
2982 return false;
2983
2984 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2985 }
2986
2987 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2988
2989 unsigned int
2990 set_nothrow_function_flags (void)
2991 {
2992 rtx insn;
2993
2994 crtl->nothrow = 1;
2995
2996 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2997 something that can throw an exception. We specifically exempt
2998 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2999 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3000 is optimistic. */
3001
3002 crtl->all_throwers_are_sibcalls = 1;
3003
3004 /* If we don't know that this implementation of the function will
3005 actually be used, then we must not set TREE_NOTHROW, since
3006 callers must not assume that this function does not throw. */
3007 if (TREE_NOTHROW (current_function_decl))
3008 return 0;
3009
3010 if (! flag_exceptions)
3011 return 0;
3012
3013 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3014 if (can_throw_external (insn))
3015 {
3016 crtl->nothrow = 0;
3017
3018 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3019 {
3020 crtl->all_throwers_are_sibcalls = 0;
3021 return 0;
3022 }
3023 }
3024
3025 for (insn = crtl->epilogue_delay_list; insn;
3026 insn = XEXP (insn, 1))
3027 if (can_throw_external (insn))
3028 {
3029 crtl->nothrow = 0;
3030
3031 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3032 {
3033 crtl->all_throwers_are_sibcalls = 0;
3034 return 0;
3035 }
3036 }
3037 if (crtl->nothrow
3038 && (cgraph_function_body_availability (cgraph_node (current_function_decl))
3039 >= AVAIL_AVAILABLE))
3040 TREE_NOTHROW (current_function_decl) = 1;
3041 return 0;
3042 }
3043
3044 struct rtl_opt_pass pass_set_nothrow_function_flags =
3045 {
3046 {
3047 RTL_PASS,
3048 NULL, /* name */
3049 NULL, /* gate */
3050 set_nothrow_function_flags, /* execute */
3051 NULL, /* sub */
3052 NULL, /* next */
3053 0, /* static_pass_number */
3054 0, /* tv_id */
3055 0, /* properties_required */
3056 0, /* properties_provided */
3057 0, /* properties_destroyed */
3058 0, /* todo_flags_start */
3059 0, /* todo_flags_finish */
3060 }
3061 };
3062
3063 \f
3064 /* Various hooks for unwind library. */
3065
3066 /* Do any necessary initialization to access arbitrary stack frames.
3067 On the SPARC, this means flushing the register windows. */
3068
3069 void
3070 expand_builtin_unwind_init (void)
3071 {
3072 /* Set this so all the registers get saved in our frame; we need to be
3073 able to copy the saved values for any registers from frames we unwind. */
3074 crtl->saves_all_registers = 1;
3075
3076 #ifdef SETUP_FRAME_ADDRESSES
3077 SETUP_FRAME_ADDRESSES ();
3078 #endif
3079 }
3080
3081 rtx
3082 expand_builtin_eh_return_data_regno (tree exp)
3083 {
3084 tree which = CALL_EXPR_ARG (exp, 0);
3085 unsigned HOST_WIDE_INT iwhich;
3086
3087 if (TREE_CODE (which) != INTEGER_CST)
3088 {
3089 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3090 return constm1_rtx;
3091 }
3092
3093 iwhich = tree_low_cst (which, 1);
3094 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3095 if (iwhich == INVALID_REGNUM)
3096 return constm1_rtx;
3097
3098 #ifdef DWARF_FRAME_REGNUM
3099 iwhich = DWARF_FRAME_REGNUM (iwhich);
3100 #else
3101 iwhich = DBX_REGISTER_NUMBER (iwhich);
3102 #endif
3103
3104 return GEN_INT (iwhich);
3105 }
3106
3107 /* Given a value extracted from the return address register or stack slot,
3108 return the actual address encoded in that value. */
3109
3110 rtx
3111 expand_builtin_extract_return_addr (tree addr_tree)
3112 {
3113 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3114
3115 if (GET_MODE (addr) != Pmode
3116 && GET_MODE (addr) != VOIDmode)
3117 {
3118 #ifdef POINTERS_EXTEND_UNSIGNED
3119 addr = convert_memory_address (Pmode, addr);
3120 #else
3121 addr = convert_to_mode (Pmode, addr, 0);
3122 #endif
3123 }
3124
3125 /* First mask out any unwanted bits. */
3126 #ifdef MASK_RETURN_ADDR
3127 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3128 #endif
3129
3130 /* Then adjust to find the real return address. */
3131 #if defined (RETURN_ADDR_OFFSET)
3132 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3133 #endif
3134
3135 return addr;
3136 }
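
/* For example, a target whose call instruction leaves status bits in
   the low bits of the return address would define MASK_RETURN_ADDR to
   clear them, and a target whose saved return address points at the
   call insn rather than past it would define RETURN_ADDR_OFFSET to
   compensate.  Which targets do so is a target-header detail; the
   code above merely applies whichever macros are defined.  */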
3137
3138 /* Given an actual address in addr_tree, do any necessary encoding
3139 and return the value to be stored in the return address register or
3140 stack slot so the epilogue will return to that address. */
3141
3142 rtx
3143 expand_builtin_frob_return_addr (tree addr_tree)
3144 {
3145 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3146
3147 addr = convert_memory_address (Pmode, addr);
3148
3149 #ifdef RETURN_ADDR_OFFSET
3150 addr = force_reg (Pmode, addr);
3151 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3152 #endif
3153
3154 return addr;
3155 }
3156
3157 /* Set up the epilogue with the magic bits we'll need to return to the
3158 exception handler. */
3159
3160 void
3161 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3162 tree handler_tree)
3163 {
3164 rtx tmp;
3165
3166 #ifdef EH_RETURN_STACKADJ_RTX
3167 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3168 VOIDmode, EXPAND_NORMAL);
3169 tmp = convert_memory_address (Pmode, tmp);
3170 if (!crtl->eh.ehr_stackadj)
3171 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3172 else if (tmp != crtl->eh.ehr_stackadj)
3173 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3174 #endif
3175
3176 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3177 VOIDmode, EXPAND_NORMAL);
3178 tmp = convert_memory_address (Pmode, tmp);
3179 if (!crtl->eh.ehr_handler)
3180 crtl->eh.ehr_handler = copy_to_reg (tmp);
3181 else if (tmp != crtl->eh.ehr_handler)
3182 emit_move_insn (crtl->eh.ehr_handler, tmp);
3183
3184 if (!crtl->eh.ehr_label)
3185 crtl->eh.ehr_label = gen_label_rtx ();
3186 emit_jump (crtl->eh.ehr_label);
3187 }
3188
3189 void
3190 expand_eh_return (void)
3191 {
3192 rtx around_label;
3193
3194 if (! crtl->eh.ehr_label)
3195 return;
3196
3197 crtl->calls_eh_return = 1;
3198
3199 #ifdef EH_RETURN_STACKADJ_RTX
3200 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3201 #endif
3202
3203 around_label = gen_label_rtx ();
3204 emit_jump (around_label);
3205
3206 emit_label (crtl->eh.ehr_label);
3207 clobber_return_register ();
3208
3209 #ifdef EH_RETURN_STACKADJ_RTX
3210 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3211 #endif
3212
3213 #ifdef HAVE_eh_return
3214 if (HAVE_eh_return)
3215 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3216 else
3217 #endif
3218 {
3219 #ifdef EH_RETURN_HANDLER_RTX
3220 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3221 #else
3222 error ("__builtin_eh_return not supported on this target");
3223 #endif
3224 }
3225
3226 emit_label (around_label);
3227 }
3228
3229 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3230 POINTERS_EXTEND_UNSIGNED and return it. */
3231
3232 rtx
3233 expand_builtin_extend_pointer (tree addr_tree)
3234 {
3235 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3236 int extend;
3237
3238 #ifdef POINTERS_EXTEND_UNSIGNED
3239 extend = POINTERS_EXTEND_UNSIGNED;
3240 #else
3241 /* The previous EH code did an unsigned extend by default, so we do this also
3242 for consistency. */
3243 extend = 1;
3244 #endif
3245
3246 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3247 }
3248 \f
3249 /* In the following functions, we represent entries in the action table
3250 as 1-based indices. Special cases are:
3251
3252 0: null action record, non-null landing pad; implies cleanups
3253 -1: null action record, null landing pad; implies no action
3254 -2: no call-site entry; implies must_not_throw
3255 -3: we have yet to process outer regions
3256
3257 Further, no special cases apply to the "next" field of the record.
3258 For next, 0 means end of list. */
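
/* Example: a call site inside "try { ... } catch (A) { }" that can
   also reach an enclosing cleanup yields a two-record chain: a cleanup
   record (filter 0, next 0, i.e. end of list) and a catch record
   (filter = A's assigned filter value, next linking to the cleanup
   record).  The call site references the catch record by its 1-based
   offset in the action table.  */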
3259
3260 struct action_record
3261 {
3262 int offset;
3263 int filter;
3264 int next;
3265 };
3266
3267 static int
3268 action_record_eq (const void *pentry, const void *pdata)
3269 {
3270 const struct action_record *entry = (const struct action_record *) pentry;
3271 const struct action_record *data = (const struct action_record *) pdata;
3272 return entry->filter == data->filter && entry->next == data->next;
3273 }
3274
3275 static hashval_t
3276 action_record_hash (const void *pentry)
3277 {
3278 const struct action_record *entry = (const struct action_record *) pentry;
3279 return entry->next * 1009 + entry->filter;
3280 }
3281
3282 static int
3283 add_action_record (htab_t ar_hash, int filter, int next)
3284 {
3285 struct action_record **slot, *new_ar, tmp;
3286
3287 tmp.filter = filter;
3288 tmp.next = next;
3289 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3290
3291 if ((new_ar = *slot) == NULL)
3292 {
3293 new_ar = XNEW (struct action_record);
3294 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3295 new_ar->filter = filter;
3296 new_ar->next = next;
3297 *slot = new_ar;
3298
3299 /* The filter value goes in untouched. The link to the next
3300 record is a "self-relative" byte offset, or zero to indicate
3301 that there is no next record. So convert the absolute 1-based
3302 indices we've been carrying around into a displacement. */
3303
3304 push_sleb128 (&crtl->eh.action_record_data, filter);
3305 if (next)
3306 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3307 push_sleb128 (&crtl->eh.action_record_data, next);
3308 }
3309
3310 return new_ar->offset;
3311 }
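
/* Worked example of the conversion above: the first record created
   lands at offset 1 and, for a small filter value, occupies two bytes,
   sleb128(filter) then sleb128(0).  A second record chaining to it
   lands at offset 3; by the time its "next" field is pushed the table
   holds three bytes, so next becomes 1 - (3 + 1) = -3: a displacement
   from the "next" field's own position (offset 4) back to the first
   record (offset 1).  */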
3312
3313 static int
3314 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3315 {
3316 struct eh_region *c;
3317 int next;
3318
3319 /* If we've reached the top of the region chain, then we have
3320 no actions, and require no landing pad. */
3321 if (region == NULL)
3322 return -1;
3323
3324 switch (region->type)
3325 {
3326 case ERT_CLEANUP:
3327 /* A cleanup adds a zero filter to the beginning of the chain, but
3328 there are special cases to look out for. If there are *only*
3329 cleanups along a path, then it compresses to a zero action.
3330 Further, if there are multiple cleanups along a path, we only
3331 need to represent one of them, as that is enough to trigger
3332 entry to the landing pad at runtime. */
3333 next = collect_one_action_chain (ar_hash, region->outer);
3334 if (next <= 0)
3335 return 0;
3336 for (c = region->outer; c ; c = c->outer)
3337 if (c->type == ERT_CLEANUP)
3338 return next;
3339 return add_action_record (ar_hash, 0, next);
3340
3341 case ERT_TRY:
3342 /* Process the associated catch regions in reverse order.
3343 If there's a catch-all handler, then we don't need to
3344 search outer regions. Use a magic -3 value to record
3345 that we haven't done the outer search. */
3346 next = -3;
3347 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3348 {
3349 if (c->u.eh_catch.type_list == NULL)
3350 {
3351 /* Retrieve the filter from the head of the filter list
3352 where we have stored it (see assign_filter_values). */
3353 int filter
3354 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3355
3356 next = add_action_record (ar_hash, filter, 0);
3357 }
3358 else
3359 {
3360 /* Once the outer search is done, trigger an action record for
3361 each filter we have. */
3362 tree flt_node;
3363
3364 if (next == -3)
3365 {
3366 next = collect_one_action_chain (ar_hash, region->outer);
3367
3368 /* If there is no next action, terminate the chain. */
3369 if (next == -1)
3370 next = 0;
3371 /* If all outer actions are cleanups or must_not_throw,
3372 we'll have no action record for it, since we had wanted
3373 to encode these states in the call-site record directly.
3374 Add a cleanup action to the chain to catch these. */
3375 else if (next <= 0)
3376 next = add_action_record (ar_hash, 0, 0);
3377 }
3378
3379 flt_node = c->u.eh_catch.filter_list;
3380 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3381 {
3382 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3383 next = add_action_record (ar_hash, filter, next);
3384 }
3385 }
3386 }
3387 return next;
3388
3389 case ERT_ALLOWED_EXCEPTIONS:
3390 /* An exception specification adds its filter to the
3391 beginning of the chain. */
3392 next = collect_one_action_chain (ar_hash, region->outer);
3393
3394 /* If there is no next action, terminate the chain. */
3395 if (next == -1)
3396 next = 0;
3397 /* If all outer actions are cleanups or must_not_throw,
3398 we'll have no action record for it, since we had wanted
3399 to encode these states in the call-site record directly.
3400 Add a cleanup action to the chain to catch these. */
3401 else if (next <= 0)
3402 next = add_action_record (ar_hash, 0, 0);
3403
3404 return add_action_record (ar_hash, region->u.allowed.filter, next);
3405
3406 case ERT_MUST_NOT_THROW:
3407 /* A must-not-throw region with no inner handlers or cleanups
3408 requires no call-site entry. Note that this differs from
3409 the no handler or cleanup case in that we do require an lsda
3410 to be generated. Return a magic -2 value to record this. */
3411 return -2;
3412
3413 case ERT_CATCH:
3414 case ERT_THROW:
3415 /* CATCH regions are handled in TRY above. THROW regions are
3416 for optimization information only and produce no output. */
3417 return collect_one_action_chain (ar_hash, region->outer);
3418
3419 default:
3420 gcc_unreachable ();
3421 }
3422 }
3423
3424 static int
3425 add_call_site (rtx landing_pad, int action)
3426 {
3427 call_site_record record;
3428
3429 record = GGC_NEW (struct call_site_record);
3430 record->landing_pad = landing_pad;
3431 record->action = action;
3432
3433 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3434
3435 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3436 }
3437
3438 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3439 The new note numbers will not refer to region numbers, but
3440 instead to call site entries. */
3441
3442 unsigned int
3443 convert_to_eh_region_ranges (void)
3444 {
3445 rtx insn, iter, note;
3446 htab_t ar_hash;
3447 int last_action = -3;
3448 rtx last_action_insn = NULL_RTX;
3449 rtx last_landing_pad = NULL_RTX;
3450 rtx first_no_action_insn = NULL_RTX;
3451 int call_site = 0;
3452
3453 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3454 return 0;
3455
3456 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3457
3458 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3459
3460 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3461 if (INSN_P (iter))
3462 {
3463 struct eh_region *region;
3464 int this_action;
3465 rtx this_landing_pad;
3466
3467 insn = iter;
3468 if (NONJUMP_INSN_P (insn)
3469 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3470 insn = XVECEXP (PATTERN (insn), 0, 0);
3471
3472 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3473 if (!note)
3474 {
3475 if (! (CALL_P (insn)
3476 || (flag_non_call_exceptions
3477 && may_trap_p (PATTERN (insn)))))
3478 continue;
3479 this_action = -1;
3480 region = NULL;
3481 }
3482 else
3483 {
3484 if (INTVAL (XEXP (note, 0)) <= 0)
3485 continue;
3486 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3487 this_action = collect_one_action_chain (ar_hash, region);
3488 }
3489
3490 /* Existence of catch handlers or must-not-throw regions
3491 implies that an lsda is needed (even if empty). */
3492 if (this_action != -1)
3493 crtl->uses_eh_lsda = 1;
3494
3495 /* Delay creation of region notes for no-action regions
3496 until we're sure that an lsda will be required. */
3497 else if (last_action == -3)
3498 {
3499 first_no_action_insn = iter;
3500 last_action = -1;
3501 }
3502
3503 /* Cleanups and handlers may share action chains but not
3504 landing pads. Collect the landing pad for this region. */
3505 if (this_action >= 0)
3506 {
3507 struct eh_region *o;
3508 for (o = region; ! o->landing_pad ; o = o->outer)
3509 continue;
3510 this_landing_pad = o->landing_pad;
3511 }
3512 else
3513 this_landing_pad = NULL_RTX;
3514
3515 /* Differing actions or landing pads implies a change in call-site
3516 info, which implies some EH_REGION note should be emitted. */
3517 if (last_action != this_action
3518 || last_landing_pad != this_landing_pad)
3519 {
3520 /* If we'd not seen a previous action (-3) or the previous
3521 action was must-not-throw (-2), then we do not need an
3522 end note. */
3523 if (last_action >= -1)
3524 {
3525 /* If we delayed the creation of the begin, do it now. */
3526 if (first_no_action_insn)
3527 {
3528 call_site = add_call_site (NULL_RTX, 0);
3529 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3530 first_no_action_insn);
3531 NOTE_EH_HANDLER (note) = call_site;
3532 first_no_action_insn = NULL_RTX;
3533 }
3534
3535 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3536 last_action_insn);
3537 NOTE_EH_HANDLER (note) = call_site;
3538 }
3539
3540 /* If the new action is must-not-throw, then no region notes
3541 are created. */
3542 if (this_action >= -1)
3543 {
3544 call_site = add_call_site (this_landing_pad,
3545 this_action < 0 ? 0 : this_action);
3546 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3547 NOTE_EH_HANDLER (note) = call_site;
3548 }
3549
3550 last_action = this_action;
3551 last_landing_pad = this_landing_pad;
3552 }
3553 last_action_insn = iter;
3554 }
3555
3556 if (last_action >= -1 && ! first_no_action_insn)
3557 {
3558 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3559 NOTE_EH_HANDLER (note) = call_site;
3560 }
3561
3562 htab_delete (ar_hash);
3563 return 0;
3564 }
3565
3566 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3567 {
3568 {
3569 RTL_PASS,
3570 "eh_ranges", /* name */
3571 NULL, /* gate */
3572 convert_to_eh_region_ranges, /* execute */
3573 NULL, /* sub */
3574 NULL, /* next */
3575 0, /* static_pass_number */
3576 0, /* tv_id */
3577 0, /* properties_required */
3578 0, /* properties_provided */
3579 0, /* properties_destroyed */
3580 0, /* todo_flags_start */
3581 TODO_dump_func, /* todo_flags_finish */
3582 }
3583 };
3584
3585 \f
3586 static void
3587 push_uleb128 (varray_type *data_area, unsigned int value)
3588 {
3589 do
3590 {
3591 unsigned char byte = value & 0x7f;
3592 value >>= 7;
3593 if (value)
3594 byte |= 0x80;
3595 VARRAY_PUSH_UCHAR (*data_area, byte);
3596 }
3597 while (value);
3598 }
3599
3600 static void
3601 push_sleb128 (varray_type *data_area, int value)
3602 {
3603 unsigned char byte;
3604 int more;
3605
3606 do
3607 {
3608 byte = value & 0x7f;
3609 value >>= 7;
3610 more = ! ((value == 0 && (byte & 0x40) == 0)
3611 || (value == -1 && (byte & 0x40) != 0));
3612 if (more)
3613 byte |= 0x80;
3614 VARRAY_PUSH_UCHAR (*data_area, byte);
3615 }
3616 while (more);
3617 }
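
/* Worked examples: push_uleb128 with value 624485 appends the bytes
   0xE5 0x8E 0x26 -- low-order groups of seven bits first, the high bit
   set on every byte but the last.  push_sleb128 with value -2 appends
   the single byte 0x7E, since bit 6 of that byte already carries the
   sign.  */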
3618
3619 \f
3620 #ifndef HAVE_AS_LEB128
3621 static int
3622 dw2_size_of_call_site_table (void)
3623 {
3624 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3625 int size = n * (4 + 4 + 4);
3626 int i;
3627
3628 for (i = 0; i < n; ++i)
3629 {
3630 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3631 size += size_of_uleb128 (cs->action);
3632 }
3633
3634 return size;
3635 }
3636
3637 static int
3638 sjlj_size_of_call_site_table (void)
3639 {
3640 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3641 int size = 0;
3642 int i;
3643
3644 for (i = 0; i < n; ++i)
3645 {
3646 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3647 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3648 size += size_of_uleb128 (cs->action);
3649 }
3650
3651 return size;
3652 }
3653 #endif
3654
3655 static void
3656 dw2_output_call_site_table (void)
3657 {
3658 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3659 int i;
3660
3661 for (i = 0; i < n; ++i)
3662 {
3663 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3664 char reg_start_lab[32];
3665 char reg_end_lab[32];
3666 char landing_pad_lab[32];
3667
3668 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3669 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3670
3671 if (cs->landing_pad)
3672 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3673 CODE_LABEL_NUMBER (cs->landing_pad));
3674
3675 /* ??? Perhaps use insn length scaling if the assembler supports
3676 generic arithmetic. */
3677 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3678 data4 if the function is small enough. */
3679 #ifdef HAVE_AS_LEB128
3680 dw2_asm_output_delta_uleb128 (reg_start_lab,
3681 current_function_func_begin_label,
3682 "region %d start", i);
3683 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3684 "length");
3685 if (cs->landing_pad)
3686 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3687 current_function_func_begin_label,
3688 "landing pad");
3689 else
3690 dw2_asm_output_data_uleb128 (0, "landing pad");
3691 #else
3692 dw2_asm_output_delta (4, reg_start_lab,
3693 current_function_func_begin_label,
3694 "region %d start", i);
3695 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3696 if (cs->landing_pad)
3697 dw2_asm_output_delta (4, landing_pad_lab,
3698 current_function_func_begin_label,
3699 "landing pad");
3700 else
3701 dw2_asm_output_data (4, 0, "landing pad");
3702 #endif
3703 dw2_asm_output_data_uleb128 (cs->action, "action");
3704 }
3705
3706 call_site_base += n;
3707 }
3708
3709 static void
3710 sjlj_output_call_site_table (void)
3711 {
3712 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3713 int i;
3714
3715 for (i = 0; i < n; ++i)
3716 {
3717 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3718
3719 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3720 "region %d landing pad", i);
3721 dw2_asm_output_data_uleb128 (cs->action, "action");
3722 }
3723
3724 call_site_base += n;
3725 }
3726
3727 #ifndef TARGET_UNWIND_INFO
3728 /* Switch to the section that should be used for exception tables. */
3729
3730 static void
3731 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3732 {
3733 section *s;
3734
3735 if (exception_section)
3736 s = exception_section;
3737 else
3738 {
3739 /* Compute the section and cache it into exception_section,
3740 unless it depends on the function name. */
3741 if (targetm.have_named_sections)
3742 {
3743 int flags;
3744
3745 if (EH_TABLES_CAN_BE_READ_ONLY)
3746 {
3747 int tt_format =
3748 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3749 flags = ((! flag_pic
3750 || ((tt_format & 0x70) != DW_EH_PE_absptr
3751 && (tt_format & 0x70) != DW_EH_PE_aligned))
3752 ? 0 : SECTION_WRITE);
3753 }
3754 else
3755 flags = SECTION_WRITE;
3756
3757 #ifdef HAVE_LD_EH_GC_SECTIONS
3758 if (flag_function_sections)
3759 {
3760 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3761 sprintf (section_name, ".gcc_except_table.%s", fnname);
3762 s = get_section (section_name, flags, NULL);
3763 free (section_name);
3764 }
3765 else
3766 #endif
3767 exception_section
3768 = s = get_section (".gcc_except_table", flags, NULL);
3769 }
3770 else
3771 exception_section
3772 = s = flag_pic ? data_section : readonly_data_section;
3773 }
3774
3775 switch_to_section (s);
3776 }
3777 #endif
3778
3779
3780 /* Output a reference from an exception table to the type_info object TYPE.
3781 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3782 the value. */
3783
3784 static void
3785 output_ttype (tree type, int tt_format, int tt_format_size)
3786 {
3787 rtx value;
3788 bool is_public = true;
3789
3790 if (type == NULL_TREE)
3791 value = const0_rtx;
3792 else
3793 {
3794 struct varpool_node *node;
3795
3796 type = lookup_type_for_runtime (type);
3797 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3798
3799 /* Let cgraph know that the rtti decl is used. Not all of the
3800 paths below go through assemble_integer, which would take
3801 care of this for us. */
3802 STRIP_NOPS (type);
3803 if (TREE_CODE (type) == ADDR_EXPR)
3804 {
3805 type = TREE_OPERAND (type, 0);
3806 if (TREE_CODE (type) == VAR_DECL)
3807 {
3808 node = varpool_node (type);
3809 if (node)
3810 varpool_mark_needed_node (node);
3811 is_public = TREE_PUBLIC (type);
3812 }
3813 }
3814 else
3815 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3816 }
3817
3818 /* Allow the target to override the type table entry format. */
3819 if (targetm.asm_out.ttype (value))
3820 return;
3821
3822 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3823 assemble_integer (value, tt_format_size,
3824 tt_format_size * BITS_PER_UNIT, 1);
3825 else
3826 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3827 }
3828
3829 void
3830 output_function_exception_table (const char * ARG_UNUSED (fnname))
3831 {
3832 int tt_format, cs_format, lp_format, i, n;
3833 #ifdef HAVE_AS_LEB128
3834 char ttype_label[32];
3835 char cs_after_size_label[32];
3836 char cs_end_label[32];
3837 #else
3838 int call_site_len;
3839 #endif
3840 int have_tt_data;
3841 int tt_format_size = 0;
3842
3843 /* Not all functions need anything. */
3844 if (! crtl->uses_eh_lsda)
3845 return;
3846
3847 if (eh_personality_libfunc)
3848 assemble_external_libcall (eh_personality_libfunc);
3849
3850 #ifdef TARGET_UNWIND_INFO
3851 /* TODO: Move this into target file. */
3852 fputs ("\t.personality\t", asm_out_file);
3853 output_addr_const (asm_out_file, eh_personality_libfunc);
3854 fputs ("\n\t.handlerdata\n", asm_out_file);
3855 /* Note that varasm still thinks we're in the function's code section.
3856 The ".endp" directive that will immediately follow will take us back. */
3857 #else
3858 switch_to_exception_section (fnname);
3859 #endif
3860
3861 /* If the target wants a label to begin the table, emit it here. */
3862 targetm.asm_out.except_table_label (asm_out_file);
3863
3864 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3865 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3866
3867 /* Indicate the format of the @TType entries. */
3868 if (! have_tt_data)
3869 tt_format = DW_EH_PE_omit;
3870 else
3871 {
3872 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3873 #ifdef HAVE_AS_LEB128
3874 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3875 current_function_funcdef_no);
3876 #endif
3877 tt_format_size = size_of_encoded_value (tt_format);
3878
3879 assemble_align (tt_format_size * BITS_PER_UNIT);
3880 }
3881
3882 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3883 current_function_funcdef_no);
3884
3885 /* The LSDA header. */
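/* As emitted below, the LSDA consists of: a @LPStart encoding byte
   (the pointer itself is omitted); a @TType encoding byte, followed,
   when @TType data is present, by a uleb128 offset to the end of the
   @TType table; a call-site encoding byte, the call-site table length
   and the call-site table; the action record table; the @TType
   (type_info) table, emitted in reverse; and the exception
   specification table. */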
3886
3887 /* Indicate the format of the landing pad start pointer. An omitted
3888 field implies @LPStart == @Start. */
3889 /* Currently we always put @LPStart == @Start. This field would
3890 be most useful in moving the landing pads completely out of
3891 line to another section, but it could also be used to minimize
3892 the size of uleb128 landing pad offsets. */
3893 lp_format = DW_EH_PE_omit;
3894 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3895 eh_data_format_name (lp_format));
3896
3897 /* @LPStart pointer would go here. */
3898
3899 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3900 eh_data_format_name (tt_format));
3901
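/* Without assembler support for leb128, table lengths cannot be
   emitted as label differences, so the call-site table must be sized
   up front. */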
3902 #ifndef HAVE_AS_LEB128
3903 if (USING_SJLJ_EXCEPTIONS)
3904 call_site_len = sjlj_size_of_call_site_table ();
3905 else
3906 call_site_len = dw2_size_of_call_site_table ();
3907 #endif
3908
3909 /* A uleb128 displacement from just past this field to the end of the @TType data. */
3910 if (have_tt_data)
3911 {
3912 #ifdef HAVE_AS_LEB128
3913 char ttype_after_disp_label[32];
3914 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3915 current_function_funcdef_no);
3916 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3917 "@TType base offset");
3918 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3919 #else
3920 /* Alignment makes this circular: the encoded size of the
     displacement determines the padding needed to align the @TType
     data, and that padding in turn changes the displacement.
     AFTER_DISP counts everything from just past the displacement
     field through the unpadded @TType data; iterate until the
     displacement reaches a fixed point. */
3921 unsigned int before_disp, after_disp, last_disp, disp;
3922
3923 before_disp = 1 + 1;  /* The @LPStart and @TType format bytes. */
3924 after_disp = (1 + size_of_uleb128 (call_site_len)
3925 + call_site_len
3926 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3927 + (VEC_length (tree, crtl->eh.ttype_data)
3928 * tt_format_size));
3929
3930 disp = after_disp;
3931 do
3932 {
3933 unsigned int disp_size, pad;
3934
3935 last_disp = disp;
3936 disp_size = size_of_uleb128 (disp);
3937 pad = before_disp + disp_size + after_disp;
3938 if (pad % tt_format_size)
3939 pad = tt_format_size - (pad % tt_format_size);
3940 else
3941 pad = 0;
3942 disp = after_disp + pad;
3943 }
3944 while (disp != last_disp);
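/* Illustrative numbers: with tt_format_size == 4 and after_disp ==
   120, the first pass finds disp_size == 1, so 2 + 1 + 120 == 123
   bytes precede the @TType data and one pad byte is needed; disp
   becomes 121, and the second pass reproduces it, ending the loop. */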
3945
3946 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3947 #endif
3948 }
3949
3950 /* Indicate the format of the call-site offsets. */
3951 #ifdef HAVE_AS_LEB128
3952 cs_format = DW_EH_PE_uleb128;
3953 #else
3954 cs_format = DW_EH_PE_udata4;
3955 #endif
3956 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3957 eh_data_format_name (cs_format));
3958
3959 #ifdef HAVE_AS_LEB128
3960 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3961 current_function_funcdef_no);
3962 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3963 current_function_funcdef_no);
3964 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3965 "Call-site table length");
3966 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3967 if (USING_SJLJ_EXCEPTIONS)
3968 sjlj_output_call_site_table ();
3969 else
3970 dw2_output_call_site_table ();
3971 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3972 #else
3973 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3974 if (USING_SJLJ_EXCEPTIONS)
3975 sjlj_output_call_site_table ();
3976 else
3977 dw2_output_call_site_table ();
3978 #endif
3979
3980 /* ??? Decode and interpret the data for flag_debug_asm. */
3981 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3982 for (i = 0; i < n; ++i)
3983 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3984 (i ? NULL : "Action record table"));
3985
3986 if (have_tt_data)
3987 assemble_align (tt_format_size * BITS_PER_UNIT);
3988
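/* @TType entries are addressed backwards from the label that follows
   the table (filter value N lives N entries before it), so emit the
   vector in reverse. */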
3989 i = VEC_length (tree, crtl->eh.ttype_data);
3990 while (i-- > 0)
3991 {
3992 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3993 output_ttype (type, tt_format, tt_format_size);
3994 }
3995
3996 #ifdef HAVE_AS_LEB128
3997 if (have_tt_data)
3998 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3999 #endif
4000
4001 /* ??? Decode and interpret the data for flag_debug_asm. */
4002 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4003 for (i = 0; i < n; ++i)
4004 {
4005 if (targetm.arm_eabi_unwinder)
4006 {
4007 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4008 output_ttype (type, tt_format, tt_format_size);
4009 }
4010 else
4011 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4012 (i ? NULL : "Exception specification table"));
4013 }
4014
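/* Switch back to the function's own section. */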
4015 switch_to_section (current_function_section ());
4016 }
4017
4018 void
4019 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4020 {
4021 fun->eh->throw_stmt_table = table;
4022 }
4023
4024 htab_t
4025 get_eh_throw_stmt_table (struct function *fun)
4026 {
4027 return fun->eh->throw_stmt_table;
4028 }
4029
4030 /* Dump EH information to OUT. */
4031
4032 void
4033 dump_eh_tree (FILE * out, struct function *fun)
4034 {
4035 struct eh_region *i;
4036 int depth = 0;
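/* Region type names, indexed by the region's type field; the order
   must match that of the ERT_* enumerators. */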
4037 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4038 "allowed_exceptions", "must_not_throw",
4039 "throw"
4040 };
4041
4042 i = fun->eh->region_tree;
4043 if (!i)
4044 return;
4045
4046 fprintf (out, "Eh tree:\n");
4047 while (1)
4048 {
4049 fprintf (out, " %*s %i %s", depth * 2, "",
4050 i->region_number, type_name[(int) i->type]);
4051 if (i->tree_label)
4052 {
4053 fprintf (out, " tree_label:");
4054 print_generic_expr (out, i->tree_label, 0);
4055 }
4056 switch (i->type)
4057 {
4058 case ERT_CLEANUP:
4059 if (i->u.cleanup.prev_try)
4060 fprintf (out, " prev try:%i",
4061 i->u.cleanup.prev_try->region_number);
4062 break;
4063
4064 case ERT_TRY:
4065 {
4066 struct eh_region *c;
4067 fprintf (out, " catch regions:");
4068 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4069 fprintf (out, " %i", c->region_number);
4070 }
4071 break;
4072
4073 case ERT_CATCH:
4074 if (i->u.eh_catch.prev_catch)
4075 fprintf (out, " prev: %i",
4076 i->u.eh_catch.prev_catch->region_number);
4077 if (i->u.eh_catch.next_catch)
4078 fprintf (out, " next %i",
4079 i->u.eh_catch.next_catch->region_number);
4080 break;
4081
4082 case ERT_ALLOWED_EXCEPTIONS:
4083 fprintf (out, "filter :%i types:", i->u.allowed.filter);
4084 print_generic_expr (out, i->u.allowed.type_list, 0);
4085 break;
4086
4087 case ERT_THROW:
4088 fprintf (out, "type:");
4089 print_generic_expr (out, i->u.eh_throw.type, 0);
4090 break;
4091
4092 case ERT_MUST_NOT_THROW:
4093 break;
4094
4095 case ERT_UNKNOWN:
4096 break;
4097 }
4098 if (i->aka)
4099 {
4100 fprintf (out, " also known as:");
4101 dump_bitmap (out, i->aka);
4102 }
4103 else
4104 fprintf (out, "\n");
4105 /* If there are sub-regions, process them. */
4106 if (i->inner)
4107 i = i->inner, depth++;
4108 /* If there are peers, process them. */
4109 else if (i->next_peer)
4110 i = i->next_peer;
4111 /* Otherwise, step back up the tree to the next peer. */
4112 else
4113 {
4114 do
4115 {
4116 i = i->outer;
4117 depth--;
4118 if (i == NULL)
4119 return;
4120 }
4121 while (i->next_peer == NULL);
4122 i = i->next_peer;
4123 }
4124 }
4125 }
4126
4127 /* Verify some basic invariants of the EH data structures.  Could be
4128    extended to catch more. */
4129 void
4130 verify_eh_tree (struct function *fun)
4131 {
4132 struct eh_region *i, *outer = NULL;
4133 bool err = false;
4134 int nvisited = 0;
4135 int count = 0;
4136 int j;
4137 int depth = 0;
4138
4139 if (!fun->eh->region_tree)
4140 return;
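/* First check region_array for consistency: each non-null slot J must
   hold a region numbered J, or one whose AKA bitmap contains J. */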
4141 for (j = fun->eh->last_region_number; j > 0; --j)
4142 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4143 {
4144 if (i->region_number == j)
4145 count++;
4146 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4147 {
4148 error ("region_array is corrupted for region %i",
4149 i->region_number);
4150 err = true;
4151 }
4152 }
4153 i = fun->eh->region_tree;
4154
4155 while (1)
4156 {
4157 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4158 {
4159 error ("region_array is corrupted for region %i", i->region_number);
4160 err = true;
4161 }
4162 if (i->outer != outer)
4163 {
4164 error ("outer block of region %i is wrong", i->region_number);
4165 err = true;
4166 }
4167 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4168 {
4169 error ("region %i may contain throw and is contained "
4170        "in region that may not",
4171        i->region_number);
4172 err = true;
4173 }
4174 if (depth < 0)
4175 {
4176 error ("negative nesting depth of region %i", i->region_number);
4177 err = true;
4178 }
4179 nvisited++;
4180 /* If there are sub-regions, process them. */
4181 if (i->inner)
4182 outer = i, i = i->inner, depth++;
4183 /* If there are peers, process them. */
4184 else if (i->next_peer)
4185 i = i->next_peer;
4186 /* Otherwise, step back up the tree to the next peer. */
4187 else
4188 {
4189 do
4190 {
4191 i = i->outer;
4192 depth--;
4193 if (i == NULL)
4194 {
4195 if (depth != -1)
4196 {
4197 error ("tree list ends on depth %i", depth + 1);
4198 err = true;
4199 }
4200 if (count != nvisited)
4201 {
4202 error ("array does not match the region tree");
4203 err = true;
4204 }
4205 if (err)
4206 {
4207 dump_eh_tree (stderr, fun);
4208 internal_error ("verify_eh_tree failed");
4209 }
4210 return;
4211 }
4212 outer = i->outer;
4213 }
4214 while (i->next_peer == NULL);
4215 i = i->next_peer;
4216 }
4217 }
4218 }
4219
4220 /* Initialize unwind_resume_libfunc. */
4221
4222 void
4223 default_init_unwind_resume_libfunc (void)
4224 {
4225 /* The default C++ routines aren't actually C++ specific, so use those. */
4226 unwind_resume_libfunc =
4227 init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4228 : "_Unwind_Resume");
4229 }
4230
4231 \f
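/* Run the RTL EH pass only if exception handling is enabled; the zero
   argument keeps doing_eh from reporting an error. */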
4232 static bool
4233 gate_handle_eh (void)
4234 {
4235 return doing_eh (0);
4236 }
4237
4238 /* Complete generation of exception handling code. */
4239 static unsigned int
4240 rest_of_handle_eh (void)
4241 {
4242 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4243 finish_eh_generation ();
4244 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4245 return 0;
4246 }
4247
4248 struct rtl_opt_pass pass_rtl_eh =
4249 {
4250 {
4251 RTL_PASS,
4252 "eh", /* name */
4253 gate_handle_eh, /* gate */
4254 rest_of_handle_eh, /* execute */
4255 NULL, /* sub */
4256 NULL, /* next */
4257 0, /* static_pass_number */
4258 TV_JUMP, /* tv_id */
4259 0, /* properties_required */
4260 0, /* properties_provided */
4261 0, /* properties_destroyed */
4262 0, /* todo_flags_start */
4263 TODO_dump_func /* todo_flags_finish */
4264 }
4265 };
4266
4267 #include "gt-except.h"