/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
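
/* For illustration only: a minimal C++ fragment of the kind this
   machinery supports.  This is an example of the *input* language,
   not of code in this file, and the names ("resource",
   "something_failed") are made up for the sketch:

       struct resource { resource (); ~resource (); };

       void build (void)
       {
         resource r;
         if (something_failed ())
           throw 42;          // signal the failure out-of-band
         // r is destroyed during unwinding if we threw
       }

       void caller (void)
       {
         try { build (); }
         catch (int) { }      // control is "thrown" up to here
       }
*/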

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-flow.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region_d *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
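
/* For reference, the layout built by init_eh below corresponds roughly
   to this C declaration; see unwind-sjlj.c for the authoritative
   definition (the field types shown here are approximations):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;   // __prev
         int call_site;                        // __call_site
         _Unwind_Word data[4];                 // __data
         void *personality;                    // __personality
         void *lsda;                           // __lsda
         void *jbuf[];                         // __jbuf; size is
                                               // target dependent
       };

   The sjlj_fc_*_ofs variables cache the byte offsets of these fields.  */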
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region_d *);
static void remove_eh_handler_and_replace (struct eh_region_d *,
                                           struct eh_region_d *, bool);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region_d *);
static int add_call_site (rtx, int, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
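
/* A typical (hypothetical) use by code that is about to emit
   EH-specific constructs would be:

       if (! doing_eh (1))
         return;

   which bails out and, at most once per compilation, reports that
   -fexceptions was not given.  */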

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region_d *
gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
{
  struct eh_region_d *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region_d);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}

struct eh_region_d *
gen_eh_region_cleanup (struct eh_region_d *outer)
{
  struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  return cleanup;
}

struct eh_region_d *
gen_eh_region_try (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region_d *
gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
{
  struct eh_region_d *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list, to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
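
/* As a sketch of the linkage built above (illustrative only; the real
   callers live in tree-eh.c): for a source-level

       try { ... } catch (A) { ... } catch (B) { ... }

   one expects calls of the form

       t  = gen_eh_region_try (outer);
       c1 = gen_eh_region_catch (t, A_type);
       c2 = gen_eh_region_catch (t, B_type);

   after which t->u.eh_try.eh_catch == c1, t->u.eh_try.last_catch == c2,
   c1->u.eh_catch.next_catch == c2, and c2->u.eh_catch.prev_catch == c1.  */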

struct eh_region_d *
gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
{
  struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region_d *
gen_eh_region_must_not_throw (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region_d *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region_d *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region_d *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region_d *region, tree lab)
{
  region->tree_label = lab;
}
\f
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  rtx insn;
  struct eh_region_d *reg = VEC_index (eh_region,
                                       cfun->eh->region_array, region_nr);

  do_pending_stack_adjust ();
  insn = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  if (reg->resume)
    reg->resume = gen_rtx_INSN_LIST (VOIDmode, insn, reg->resume);
  else
    reg->resume = insn;
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region_d *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region_d *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or else we would eliminate the
   terminate call the runtime would otherwise do.  Return TRUE if R
   contains throwing statements or some of the exceptions in inner
   regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions.  Because the outer loop walking
   subregions does not dive into MUST_NOT_THROW, the outer loop
   examines every region at most once.  The inner loop does unwinding
   from the throwing statement the same way as we do during CFG
   construction, so it is O(n^2) in the size of the EH tree, but O(n)
   in the size of the CFG.  In practice EH trees are wide, not deep,
   so this is not a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
{
  struct eh_region_d *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found && i->aka)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or we get locally
             caught sooner.  If we get locally caught sooner, we either
             know region R is not reachable or it would have a direct edge
             from the EH resx and thus we would have considered the region
             reachable in the first place.  */
          if (found)
            {
              struct eh_region_d *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region_d *r)
{
  struct eh_region_d **pp;
  struct eh_region_d *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}

/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region_d *r1,
                            const struct eh_region_d *r2)
{
  /* Regions are semantically the same if they are of the same type
     and have the same label.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that the region type dependent data are the same too.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}

/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
{
  struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
  struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);
  if (is_try)
    {
      while (next1)
        {
          r1 = next1;
          r2 = next2;
          gcc_assert (next1->type == ERT_CATCH);
          gcc_assert (next2->type == ERT_CATCH);
          next1 = next1->u.eh_catch.next_catch;
          next2 = next2->u.eh_catch.next_catch;
          remove_eh_handler_and_replace (r2, r1, false);
        }
    }
}

/* Return hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;
  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}

/* Hash EH regions so that semantically equivalent regions get the same
   hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region_d *region = (const struct eh_region_d *) r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                      (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
        (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}

/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
                                     (const struct eh_region_d *) r2);
}

/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively too.  */

static bool
merge_peers (struct eh_region_d *region)
{
  struct eh_region_d *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;
  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic so we know we
     can merge them.  */
  if (outer)
    for (r1 = region; r1; r1 = next)
      {
        next = r1->next_peer;
        if (r1->type == ERT_CATCH)
          continue;
        if (eh_region_replaceable_by_p (r1->outer, r1))
          {
            replace_region (r1->outer, r1);
            merged = true;
          }
        else
          num_regions++;
      }

  /* Get the new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* If there are few regions to inspect, an N^2 loop matching each
     region with each other region will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Otherwise use a hashtable to avoid the N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region_d *) *slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}

/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a statement
   (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region_d *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region_d *local_must_not_throw = NULL;
  struct eh_region_d *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining a function.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* TRY regions are reachable if any of their CATCH regions
                   are reachable.  */
                struct eh_region_d *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same;
     they trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or, if one is coming
     from Java and the other from C++, in whether they call terminate
     or abort.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the
     EH tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we
     use that region to merge into, since it will remain in the tree
     anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}

/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) * label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label =
              VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}

/* Return the number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return the next region sharing the same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region_d *r;
  if (!region)
    return 0;
  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}

/* Return a bitmap of all labels that are handlers of MUST_NOT_THROW
   regions.  */

bitmap
must_not_throw_labels (void)
{
  struct eh_region_d *i;
  bitmap labels = BITMAP_ALLOC (NULL);

  i = cfun->eh->region_tree;
  if (! i)
    return labels;

  while (1)
    {
      if (i->type == ERT_MUST_NOT_THROW && i->tree_label
          && LABEL_DECL_UID (i->tree_label) >= 0)
        bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return labels;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region_d);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Look for the first outer region of R (or R itself) that is
   a TRY region.  Return NULL if none.  */

static struct eh_region_d *
find_prev_try (struct eh_region_d * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that was toplevel in the input function is now
         in the outer EH of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}

/* Return a new copy of eh region OLD inside region NEW_OUTER.
   Do not care about updating the tree otherwise.  */

static struct eh_region_d *
copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);
  new_eh->u = old->u;
  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number,
               new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number,
             new_eh->region_number);
  return new_eh;
}

/* Return a new copy of eh region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary.  */

static struct eh_region_d *
copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      return r;
    }

  /* Locate and copy the corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH;
       old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions.  Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number,
               new_try);

  /* In order to keep the CATCH list in order, we need to copy in reverse
     order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH;
       r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions.  Catch: %i to %i\n",
                 r->region_number, n->region_number);
      if (r == old)
        ret = n;
    }
  VEC_free (eh_region, heap, catch_list);
  gcc_assert (ret);
  return ret;
}

/* Callback for foreach_reachable_handler that pushes REGION onto the
   single VECtor DATA.  */

static void
push_reachable_handler (struct eh_region_d *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}

/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */

struct eh_region_d *
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region_d *outer;
  struct eh_region_d *region;
  VEC (eh_region, heap) * trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region_d *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy case, just walk the trace and update all occurrences
         of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
      r = region;
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic block
         in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      gcc_assert (start_here >= 0);
      outer = VEC_index (eh_region, trace, start_here)->outer;

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying a CATCH.  copy_eh_region already did the
             job of copying all catch blocks corresponding to the try.
             Now we need to update labels in all of them and walk the
             trace.

             We continue nesting into the TRY region corresponding to the
             CATCH: when duplicating the EH tree containing subregions of
             the CATCH, the CATCH region itself is never inserted into the
             trace, so we never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk the other catch regions we copied and update labels
                 as needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
                  i--;
                }
            }

          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
  return r;
}

/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region_d *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
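
/* For example (illustrative): the first distinct type added gets filter
   value 1, the second 2, and so on, while a repeated type returns the
   value already assigned:

       add_ttypes_entry (ttypes, A);   // returns 1
       add_ttypes_entry (ttypes, B);   // returns 2
       add_ttypes_entry (ttypes, A);   // returns 1 again
*/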

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
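
/* Continuing the example above for the non-ARM-EABI case (illustrative):
   given the ttypes filters A -> 1 and B -> 2, a specification list
   (A, B) appends the uleb128 bytes { 1, 2, 0 } to ehspec_data, and the
   entry's own filter value is the negative, -1 based index of the first
   of those bytes in the buffer (-1 when the buffer was empty).  */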
1837
1838 /* Generate the action filter values to be used for CATCH and
1839 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1840 we use lots of landing pads, and so every type or list can share
1841 the same filter value, which saves table space. */
1842
1843 static void
1844 assign_filter_values (void)
1845 {
1846 int i;
1847 htab_t ttypes, ehspec;
1848
1849 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1850 if (targetm.arm_eabi_unwinder)
1851 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1852 else
1853 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1854
1855 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1856 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1857
1858 for (i = cfun->eh->last_region_number; i > 0; --i)
1859 {
1860 struct eh_region_d *r;
1861
1862 r = VEC_index (eh_region, cfun->eh->region_array, i);
1863
1864 /* Mind we don't process a region more than once. */
1865 if (!r || r->region_number != i)
1866 continue;
1867
1868 switch (r->type)
1869 {
1870 case ERT_CATCH:
1871 /* Whatever type_list is (NULL or true list), we build a list
1872 of filters for the region. */
1873 r->u.eh_catch.filter_list = NULL_TREE;
1874
1875 if (r->u.eh_catch.type_list != NULL)
1876 {
1877 /* Get a filter value for each of the types caught and store
1878 them in the region's dedicated list. */
1879 tree tp_node = r->u.eh_catch.type_list;
1880
1881 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1882 {
1883 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1884 tree flt_node = build_int_cst (NULL_TREE, flt);
1885
1886 r->u.eh_catch.filter_list
1887 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1888 }
1889 }
1890 else
1891 {
1892 /* Get a filter value for the NULL list also since it will need
1893 an action record anyway. */
1894 int flt = add_ttypes_entry (ttypes, NULL);
1895 tree flt_node = build_int_cst (NULL_TREE, flt);
1896
1897 r->u.eh_catch.filter_list
1898 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1899 }
1900
1901 break;
1902
1903 case ERT_ALLOWED_EXCEPTIONS:
1904 r->u.allowed.filter
1905 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1906 break;
1907
1908 default:
1909 break;
1910 }
1911 }
1912
1913 htab_delete (ttypes);
1914 htab_delete (ehspec);
1915 }
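
/* For illustration (types A and B hypothetical): for

     try { ... } catch (A) { ... } catch (B) { ... }

   the two ERT_CATCH regions receive filter_lists (1) and (2)
   respectively, assuming A and B first appear here.  A catch-all
   handler has a NULL type list but still receives a filter of its
   own, so that an action record can refer to it.  */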
1916
1917 /* Emit SEQ into the basic block just before INSN (which is assumed to be
1918 the first instruction of some existing BB) and return the newly
1919 produced block. */
1920 static basic_block
1921 emit_to_new_bb_before (rtx seq, rtx insn)
1922 {
1923 rtx last;
1924 basic_block bb;
1925 edge e;
1926 edge_iterator ei;
1927
1928 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1929 call), we don't want it to go into newly created landing pad or other EH
1930 construct. */
1931 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1932 if (e->flags & EDGE_FALLTHRU)
1933 force_nonfallthru (e);
1934 else
1935 ei_next (&ei);
1936 last = emit_insn_before (seq, insn);
1937 if (BARRIER_P (last))
1938 last = PREV_INSN (last);
1939 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1940 update_bb_for_insn (bb);
1941 bb->flags |= BB_SUPERBLOCK;
1942 return bb;
1943 }
1944
1945 /* Generate the code to actually handle exceptions, which will follow the
1946 landing pads. */
1947
1948 static void
1949 build_post_landing_pads (void)
1950 {
1951 int i;
1952
1953 for (i = cfun->eh->last_region_number; i > 0; --i)
1954 {
1955 struct eh_region_d *region;
1956 rtx seq;
1957
1958 region = VEC_index (eh_region, cfun->eh->region_array, i);
1959 /* Mind we don't process a region more than once. */
1960 if (!region || region->region_number != i)
1961 continue;
1962
1963 switch (region->type)
1964 {
1965 case ERT_TRY:
1966 /* It is possible that a TRY region is kept alive only because some of
1967 its contained catch regions still have a RESX instruction, but they
1968 are reached via their copies. In this case we need to do nothing. */
1969 if (!region->u.eh_try.eh_catch->label)
1970 break;
1971
1972 /* ??? Collect the set of all non-overlapping catch handlers
1973 all the way up the chain until blocked by a cleanup. */
1974 /* ??? Outer try regions can share landing pads with inner
1975 try regions if the types are completely non-overlapping,
1976 and there are no intervening cleanups. */
1977
1978 region->post_landing_pad = gen_label_rtx ();
1979
1980 start_sequence ();
1981
1982 emit_label (region->post_landing_pad);
1983
1984 /* ??? It is mighty inconvenient to call back into the
1985 switch statement generation code in expand_end_case.
1986 Rapid prototyping sez a sequence of ifs. */
1987 {
1988 struct eh_region_d *c;
1989 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1990 {
1991 if (c->u.eh_catch.type_list == NULL)
1992 emit_jump (c->label);
1993 else
1994 {
1995 /* We need one cmp/jump per type caught. Each type
1996 list entry has a matching entry in the filter list
1997 (see assign_filter_values). */
1998 tree tp_node = c->u.eh_catch.type_list;
1999 tree flt_node = c->u.eh_catch.filter_list;
2000
2001 for (; tp_node; )
2002 {
2003 emit_cmp_and_jump_insns
2004 (crtl->eh.filter,
2005 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
2006 EQ, NULL_RTX,
2007 targetm.eh_return_filter_mode (), 0, c->label);
2008
2009 tp_node = TREE_CHAIN (tp_node);
2010 flt_node = TREE_CHAIN (flt_node);
2011 }
2012 }
2013 }
2014 }
2015
2016 /* We delay the generation of the _Unwind_Resume until we generate
2017 landing pads. We emit a marker here so as to get good control
2018 flow data in the meantime. */
2019 gcc_assert (!region->resume);
2020 region->resume
2021 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2022 emit_barrier ();
2023
2024 seq = get_insns ();
2025 end_sequence ();
2026
2027 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2028
2029 break;
2030
2031 case ERT_ALLOWED_EXCEPTIONS:
2032 if (!region->label)
2033 break;
2034 region->post_landing_pad = gen_label_rtx ();
2035
2036 start_sequence ();
2037
2038 emit_label (region->post_landing_pad);
2039
2040 emit_cmp_and_jump_insns (crtl->eh.filter,
2041 GEN_INT (region->u.allowed.filter),
2042 EQ, NULL_RTX,
2043 targetm.eh_return_filter_mode (), 0, region->label);
2044
2045 /* We delay the generation of the _Unwind_Resume until we generate
2046 landing pads. We emit a marker here so as to get good control
2047 flow data in the meantime. */
2048 gcc_assert (!region->resume);
2049 region->resume
2050 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2051 emit_barrier ();
2052
2053 seq = get_insns ();
2054 end_sequence ();
2055
2056 emit_to_new_bb_before (seq, region->label);
2057 break;
2058
2059 case ERT_CLEANUP:
2060 case ERT_MUST_NOT_THROW:
2061 region->post_landing_pad = region->label;
2062 break;
2063
2064 case ERT_CATCH:
2065 case ERT_THROW:
2066 /* Nothing to do. */
2067 break;
2068
2069 default:
2070 gcc_unreachable ();
2071 }
2072 }
2073 }
2074
2075 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2076 _Unwind_Resume otherwise. */
2077
2078 static void
2079 connect_post_landing_pads (void)
2080 {
2081 int i;
2082
2083 for (i = cfun->eh->last_region_number; i > 0; --i)
2084 {
2085 struct eh_region_d *region;
2086 struct eh_region_d *outer;
2087 rtx seq;
2088 rtx barrier;
2089 rtx resume_list;
2090
2091 region = VEC_index (eh_region, cfun->eh->region_array, i);
2092 /* Mind we don't process a region more than once. */
2093 if (!region || region->region_number != i)
2094 continue;
2095
2096 /* If there is no RESX, or it has been deleted by flow, there's
2097 nothing to fix up. */
2098 if (! region->resume)
2099 continue;
2100
2101 /* Search for another landing pad in this function. */
2102 for (outer = region->outer; outer ; outer = outer->outer)
2103 if (outer->post_landing_pad)
2104 break;
2105
2106 for (resume_list = region->resume; resume_list;
2107 resume_list = (GET_CODE (resume_list) == INSN_LIST
2108 ? XEXP (resume_list, 1) : NULL_RTX))
2109 {
2110 rtx resume = (GET_CODE (resume_list) == INSN_LIST
2111 ? XEXP (resume_list, 0) : resume_list);
2112 if (INSN_DELETED_P (resume))
2113 continue;
2114 start_sequence ();
2115
2116 if (outer)
2117 {
2118 edge e;
2119 basic_block src, dest;
2120
2121 emit_jump (outer->post_landing_pad);
2122 src = BLOCK_FOR_INSN (resume);
2123 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2124 while (EDGE_COUNT (src->succs) > 0)
2125 remove_edge (EDGE_SUCC (src, 0));
2126 e = make_edge (src, dest, 0);
2127 e->probability = REG_BR_PROB_BASE;
2128 e->count = src->count;
2129 }
2130 else
2131 {
2132 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2133 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2134
2135 /* What we just emitted was a throwing libcall, so it got a
2136 barrier automatically added after it. If the last insn in
2137 the libcall sequence isn't the barrier, it's because the
2138 target emits multiple insns for a call, and there are insns
2139 after the actual call insn (which are redundant and would be
2140 optimized away). The barrier is inserted exactly after the
2141 call insn, so let's go get that and delete the insns after
2142 it, because below we need the barrier to be the last insn in
2143 the sequence. */
2144 delete_insns_since (NEXT_INSN (last_call_insn ()));
2145 }
2146
2147 seq = get_insns ();
2148 end_sequence ();
2149 barrier = emit_insn_before (seq, resume);
2150 /* Avoid duplicate barrier. */
2151 gcc_assert (BARRIER_P (barrier));
2152 delete_insn (barrier);
2153 delete_insn (resume);
2154 }
2155
2156 /* ??? From tree-ssa we can wind up with catch regions whose
2157 label is not instantiated, but whose resx is present. Now
2158 that we've dealt with the resx, kill the region. */
2159 if (region->label == NULL && region->type == ERT_CLEANUP)
2160 remove_eh_handler (region);
2161 }
2162 }
2163
2164 \f
2165 static void
2166 dw2_build_landing_pads (void)
2167 {
2168 int i;
2169
2170 for (i = cfun->eh->last_region_number; i > 0; --i)
2171 {
2172 struct eh_region_d *region;
2173 rtx seq;
2174 basic_block bb;
2175 edge e;
2176
2177 region = VEC_index (eh_region, cfun->eh->region_array, i);
2178 /* Mind we don't process a region more than once. */
2179 if (!region || region->region_number != i)
2180 continue;
2181
2182 if (region->type != ERT_CLEANUP
2183 && region->type != ERT_TRY
2184 && region->type != ERT_ALLOWED_EXCEPTIONS)
2185 continue;
2186
2187 if (!region->post_landing_pad)
2188 continue;
2189
2190 start_sequence ();
2191
2192 region->landing_pad = gen_label_rtx ();
2193 emit_label (region->landing_pad);
2194
2195 #ifdef HAVE_exception_receiver
2196 if (HAVE_exception_receiver)
2197 emit_insn (gen_exception_receiver ());
2198 else
2199 #endif
2200 #ifdef HAVE_nonlocal_goto_receiver
2201 if (HAVE_nonlocal_goto_receiver)
2202 emit_insn (gen_nonlocal_goto_receiver ());
2203 else
2204 #endif
2205 { /* Nothing */ }
2206
2207 emit_move_insn (crtl->eh.exc_ptr,
2208 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2209 emit_move_insn (crtl->eh.filter,
2210 gen_rtx_REG (targetm.eh_return_filter_mode (),
2211 EH_RETURN_DATA_REGNO (1)));
2212
2213 seq = get_insns ();
2214 end_sequence ();
2215
2216 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2217 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2218 e->count = bb->count;
2219 e->probability = REG_BR_PROB_BASE;
2220 }
2221 }
2222
2223 \f
2224 struct sjlj_lp_info
2225 {
2226 int directly_reachable;
2227 int action_index;
2228 int dispatch_index;
2229 int call_site_index;
2230 };
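
/* A note on how the fields above are used by the functions below:
   DIRECTLY_REACHABLE is set for regions that some insn can transfer
   control to; ACTION_INDEX is the action chain built by
   collect_one_action_chain (-1 for no action, -2 for must-not-throw);
   DISPATCH_INDEX numbers the reachable regions consecutively for the
   common sjlj dispatcher; and CALL_SITE_INDEX is the value stored into
   the function context before each potentially throwing insn.  */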
2231
2232 static bool
2233 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2234 {
2235 rtx insn;
2236 bool found_one = false;
2237
2238 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2239 {
2240 struct eh_region_d *region;
2241 enum reachable_code rc;
2242 tree type_thrown;
2243 rtx note;
2244
2245 if (! INSN_P (insn))
2246 continue;
2247
2248 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2249 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2250 continue;
2251
2252 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2253 if (!region)
2254 continue;
2255
2256 type_thrown = NULL_TREE;
2257 if (region->type == ERT_THROW)
2258 {
2259 type_thrown = region->u.eh_throw.type;
2260 region = region->outer;
2261 }
2262
2263 /* Find the first containing region that might handle the exception.
2264 That's the landing pad to which we will transfer control. */
2265 rc = RNL_NOT_CAUGHT;
2266 for (; region; region = region->outer)
2267 {
2268 rc = reachable_next_level (region, type_thrown, NULL, false);
2269 if (rc != RNL_NOT_CAUGHT)
2270 break;
2271 }
2272 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2273 {
2274 lp_info[region->region_number].directly_reachable = 1;
2275 found_one = true;
2276 }
2277 }
2278
2279 return found_one;
2280 }
2281
2282 static void
2283 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2284 {
2285 htab_t ar_hash;
2286 int i, index;
2287
2288 /* First task: build the action table. */
2289
2290 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2291 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2292
2293 for (i = cfun->eh->last_region_number; i > 0; --i)
2294 if (lp_info[i].directly_reachable)
2295 {
2296 struct eh_region_d *r =
2297 VEC_index (eh_region, cfun->eh->region_array, i);
2298
2299 r->landing_pad = dispatch_label;
2300 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2301 if (lp_info[i].action_index != -1)
2302 crtl->uses_eh_lsda = 1;
2303 }
2304
2305 htab_delete (ar_hash);
2306
2307 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2308 landing pad label for the region. For sjlj though, there is one
2309 common landing pad from which we dispatch to the post-landing pads.
2310
2311 A region receives a dispatch index if it is directly reachable
2312 and requires in-function processing. Regions that share post-landing
2313 pads may share dispatch indices. */
2314 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2315 (see build_post_landing_pads) so we don't bother checking for it. */
2316
2317 index = 0;
2318 for (i = cfun->eh->last_region_number; i > 0; --i)
2319 if (lp_info[i].directly_reachable)
2320 lp_info[i].dispatch_index = index++;
2321
2322 /* Finally: assign call-site values. In dwarf2 terms, this would be
2323 the region number assigned by convert_to_eh_region_ranges, but
2324 handles no-action and must-not-throw differently. */
2325
2326 call_site_base = 1;
2327 for (i = cfun->eh->last_region_number; i > 0; --i)
2328 if (lp_info[i].directly_reachable)
2329 {
2330 int action = lp_info[i].action_index;
2331
2332 /* Map must-not-throw to otherwise unused call-site index 0. */
2333 if (action == -2)
2334 index = 0;
2335 /* Map no-action to otherwise unused call-site index -1. */
2336 else if (action == -1)
2337 index = -1;
2338 /* Otherwise, look it up in the table. */
2339 else
2340 index = add_call_site (GEN_INT (lp_info[i].dispatch_index),
2341 action, 0);
2342
2343 lp_info[i].call_site_index = index;
2344 }
2345 }
2346
2347 static void
2348 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2349 {
2350 int last_call_site = -2;
2351 rtx insn, mem;
2352
2353 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2354 {
2355 struct eh_region_d *region;
2356 int this_call_site;
2357 rtx note, before, p;
2358
2359 /* Reset value tracking at extended basic block boundaries. */
2360 if (LABEL_P (insn))
2361 last_call_site = -2;
2362
2363 if (! INSN_P (insn))
2364 continue;
2365
2366 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2367
2368 /* Calls that are known to not throw need not be marked. */
2369 if (note && INTVAL (XEXP (note, 0)) <= 0)
2370 continue;
2371
2372 if (note)
2373 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2374 else
2375 region = NULL;
2376
2377 if (!region)
2378 {
2379 /* Calls (and trapping insns) without notes are outside any
2380 exception handling region in this function. Mark them as
2381 no action. */
2382 if (CALL_P (insn)
2383 || (flag_non_call_exceptions
2384 && may_trap_p (PATTERN (insn))))
2385 this_call_site = -1;
2386 else
2387 continue;
2388 }
2389 else
2390 this_call_site = lp_info[region->region_number].call_site_index;
2391
2392 if (this_call_site == last_call_site)
2393 continue;
2394
2395 /* Don't separate a call from its argument loads. */
2396 before = insn;
2397 if (CALL_P (insn))
2398 before = find_first_parameter_load (insn, NULL_RTX);
2399
2400 start_sequence ();
2401 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2402 sjlj_fc_call_site_ofs);
2403 emit_move_insn (mem, GEN_INT (this_call_site));
2404 p = get_insns ();
2405 end_sequence ();
2406
2407 emit_insn_before (p, before);
2408 last_call_site = this_call_site;
2409 }
2410 }
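
/* The net effect is that every potentially throwing insn executes with
   its call-site index already stored in the function context, roughly
   (the index 5 is hypothetical):

     fc.call_site = 5;     store emitted by the loop above
     call foo ();          may throw; the unwinder reads fc.call_site

   Redundant stores are elided when consecutive insns within an
   extended basic block share the same index.  */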
2411
2412 /* Construct the SjLj_Function_Context. */
2413
2414 static void
2415 sjlj_emit_function_enter (rtx dispatch_label)
2416 {
2417 rtx fn_begin, fc, mem, seq;
2418 bool fn_begin_outside_block;
2419
2420 fc = crtl->eh.sjlj_fc;
2421
2422 start_sequence ();
2423
2424 /* We're storing this libcall's address into memory instead of
2425 calling it directly. Thus, we must call assemble_external_libcall
2426 here, as we cannot depend on emit_library_call to do it for us. */
2427 assemble_external_libcall (eh_personality_libfunc);
2428 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2429 emit_move_insn (mem, eh_personality_libfunc);
2430
2431 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2432 if (crtl->uses_eh_lsda)
2433 {
2434 char buf[20];
2435 rtx sym;
2436
2437 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2438 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2439 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2440 emit_move_insn (mem, sym);
2441 }
2442 else
2443 emit_move_insn (mem, const0_rtx);
2444
2445 #ifdef DONT_USE_BUILTIN_SETJMP
2446 {
2447 rtx x;
2448 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2449 TYPE_MODE (integer_type_node), 1,
2450 plus_constant (XEXP (fc, 0),
2451 sjlj_fc_jbuf_ofs), Pmode);
2452
2453 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2454 TYPE_MODE (integer_type_node), 0, dispatch_label);
2455 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2456 }
2457 #else
2458 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2459 dispatch_label);
2460 #endif
2461
2462 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2463 1, XEXP (fc, 0), Pmode);
2464
2465 seq = get_insns ();
2466 end_sequence ();
2467
2468 /* ??? Instead of doing this at the beginning of the function,
2469 do this in a block that is at loop level 0 and dominates all
2470 can_throw_internal instructions. */
2471
2472 fn_begin_outside_block = true;
2473 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2474 if (NOTE_P (fn_begin))
2475 {
2476 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2477 break;
2478 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2479 fn_begin_outside_block = false;
2480 }
2481
2482 if (fn_begin_outside_block)
2483 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2484 else
2485 emit_insn_after (seq, fn_begin);
2486 }
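
/* In outline, the prologue sequence constructed above amounts to the
   following pseudo-C, where the personality routine name is an example
   only and fc.lsda becomes 0 when no lsda is used:

     fc.personality = __gxx_personality_sj0;
     fc.lsda = &LLSDAnnn;
     if (setjmp (fc.jbuf))
       goto dispatch_label;
     _Unwind_SjLj_Register (&fc);

   The library setjmp call appears only when DONT_USE_BUILTIN_SETJMP is
   defined; otherwise the builtin setjmp setup is expanded instead.  */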
2487
2488 /* Call back from expand_function_end to know where we should put
2489 the call to unwind_sjlj_unregister_libfunc if needed. */
2490
2491 void
2492 sjlj_emit_function_exit_after (rtx after)
2493 {
2494 crtl->eh.sjlj_exit_after = after;
2495 }
2496
2497 static void
2498 sjlj_emit_function_exit (void)
2499 {
2500 rtx seq, insn;
2501
2502 start_sequence ();
2503
2504 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2505 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2506
2507 seq = get_insns ();
2508 end_sequence ();
2509
2510 /* ??? Really this can be done in any block at loop level 0 that
2511 post-dominates all can_throw_internal instructions. This is
2512 the last possible moment. */
2513
2514 insn = crtl->eh.sjlj_exit_after;
2515 if (LABEL_P (insn))
2516 insn = NEXT_INSN (insn);
2517
2518 emit_insn_after (seq, insn);
2519 }
2520
2521 static void
2522 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2523 {
2524 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2525 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2526 int i, first_reachable;
2527 rtx mem, dispatch, seq, fc;
2528 rtx before;
2529 basic_block bb;
2530 edge e;
2531
2532 fc = crtl->eh.sjlj_fc;
2533
2534 start_sequence ();
2535
2536 emit_label (dispatch_label);
2537
2538 #ifndef DONT_USE_BUILTIN_SETJMP
2539 expand_builtin_setjmp_receiver (dispatch_label);
2540 #endif
2541
2542 /* Load up dispatch index, exc_ptr and filter values from the
2543 function context. */
2544 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2545 sjlj_fc_call_site_ofs);
2546 dispatch = copy_to_reg (mem);
2547
2548 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2549 if (unwind_word_mode != ptr_mode)
2550 {
2551 #ifdef POINTERS_EXTEND_UNSIGNED
2552 mem = convert_memory_address (ptr_mode, mem);
2553 #else
2554 mem = convert_to_mode (ptr_mode, mem, 0);
2555 #endif
2556 }
2557 emit_move_insn (crtl->eh.exc_ptr, mem);
2558
2559 mem = adjust_address (fc, unwind_word_mode,
2560 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2561 if (unwind_word_mode != filter_mode)
2562 mem = convert_to_mode (filter_mode, mem, 0);
2563 emit_move_insn (crtl->eh.filter, mem);
2564
2565 /* Jump to one of the directly reachable regions. */
2566 /* ??? This really ought to be using a switch statement. */
2567
2568 first_reachable = 0;
2569 for (i = cfun->eh->last_region_number; i > 0; --i)
2570 {
2571 if (! lp_info[i].directly_reachable)
2572 continue;
2573
2574 if (! first_reachable)
2575 {
2576 first_reachable = i;
2577 continue;
2578 }
2579
2580 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2581 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2582 (((struct eh_region_d *)
2583 VEC_index (eh_region,
2584 cfun->eh->region_array, i))
2585 ->post_landing_pad));
2586 }
2587
2588 seq = get_insns ();
2589 end_sequence ();
2590
2591 before = (((struct eh_region_d *)
2592 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2593 ->post_landing_pad);
2594
2595 bb = emit_to_new_bb_before (seq, before);
2596 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2597 e->count = bb->count;
2598 e->probability = REG_BR_PROB_BASE;
2599 }
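
/* In outline, the dispatcher built above behaves like the following
   pseudo-C, where fc.data[0] and fc.data[1] stand for the two
   unwind-word slots at sjlj_fc_data_ofs and the indices are examples:

     dispatch_label:
       (builtin setjmp receiver, unless DONT_USE_BUILTIN_SETJMP)
       dispatch = fc.call_site;
       exc_ptr = fc.data[0];
       filter = fc.data[1];
       if (dispatch == 1) goto post_landing_pad_1;
       if (dispatch == 2) goto post_landing_pad_2;
       ... falling through to the first reachable region's pad ...  */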
2600
2601 static void
2602 sjlj_build_landing_pads (void)
2603 {
2604 struct sjlj_lp_info *lp_info;
2605
2606 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2607
2608 if (sjlj_find_directly_reachable_regions (lp_info))
2609 {
2610 rtx dispatch_label = gen_label_rtx ();
2611 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2612 TYPE_MODE (sjlj_fc_type_node),
2613 TYPE_ALIGN (sjlj_fc_type_node));
2614 crtl->eh.sjlj_fc
2615 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2616 int_size_in_bytes (sjlj_fc_type_node),
2617 align);
2618
2619 sjlj_assign_call_site_values (dispatch_label, lp_info);
2620 sjlj_mark_call_sites (lp_info);
2621
2622 sjlj_emit_function_enter (dispatch_label);
2623 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2624 sjlj_emit_function_exit ();
2625 }
2626
2627 free (lp_info);
2628 }
2629
2630 /* After initial rtl generation, call back to finish generating
2631 exception support code. */
2632
2633 static void
2634 finish_eh_generation (void)
2635 {
2636 basic_block bb;
2637
2638 /* Nothing to do if no regions created. */
2639 if (cfun->eh->region_tree == NULL)
2640 return;
2641
2642 /* The object here is to provide detailed information (via
2643 reachable_handlers) on how exception control flows within the
2644 function for the CFG construction. In this first pass, we can
2645 include type information garnered from ERT_THROW and
2646 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2647 in deleting unreachable handlers. Subsequently, we will generate
2648 landing pads which will connect many of the handlers, and then
2649 type information will not be effective. Still, this is a win
2650 over previous implementations. */
2651
2652 /* These registers are used by the landing pads. Make sure they
2653 have been generated. */
2654 get_exception_pointer ();
2655 get_exception_filter ();
2656
2657 /* Construct the landing pads. */
2658
2659 assign_filter_values ();
2660 build_post_landing_pads ();
2661 connect_post_landing_pads ();
2662 if (USING_SJLJ_EXCEPTIONS)
2663 sjlj_build_landing_pads ();
2664 else
2665 dw2_build_landing_pads ();
2666
2667 crtl->eh.built_landing_pads = 1;
2668
2669 /* We've totally changed the CFG. Start over. */
2670 find_exception_handler_labels ();
2671 break_superblocks ();
2672 if (USING_SJLJ_EXCEPTIONS
2673 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2674 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2675 commit_edge_insertions ();
2676 FOR_EACH_BB (bb)
2677 {
2678 edge e;
2679 edge_iterator ei;
2680 bool eh = false;
2681 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2682 {
2683 if (e->flags & EDGE_EH)
2684 {
2685 remove_edge (e);
2686 eh = true;
2687 }
2688 else
2689 ei_next (&ei);
2690 }
2691 if (eh)
2692 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2693 }
2694 }
2695 \f
2696 /* This section handles removing dead code for flow. */
2697
2698 /* Splice REGION from the region tree and replace it by REPLACE etc.
2699 When UPDATE_CATCH_TRY is true, also update the links from catch
2700 regions to their containing try region. */
2701
2702 static void
2703 remove_eh_handler_and_replace (struct eh_region_d *region,
2704 struct eh_region_d *replace,
2705 bool update_catch_try)
2706 {
2707 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2708 rtx lab;
2709
2710 outer = region->outer;
2711
2712 /* For the benefit of efficiently handling REG_EH_REGION notes,
2713 replace this region in the region array with REPLACE. Note that
2714 previous region deletions may result in
2715 multiple copies of this region in the array, so we have a
2716 list of alternate numbers by which we are known. */
2717
2718 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2719 replace);
2720 if (region->aka)
2721 {
2722 unsigned i;
2723 bitmap_iterator bi;
2724
2725 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2726 {
2727 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2728 }
2729 }
2730
2731 if (replace)
2732 {
2733 if (!replace->aka)
2734 replace->aka = BITMAP_GGC_ALLOC ();
2735 if (region->aka)
2736 bitmap_ior_into (replace->aka, region->aka);
2737 bitmap_set_bit (replace->aka, region->region_number);
2738 }
2739
2740 if (crtl->eh.built_landing_pads)
2741 lab = region->landing_pad;
2742 else
2743 lab = region->label;
2744 if (outer)
2745 pp_start = &outer->inner;
2746 else
2747 pp_start = &cfun->eh->region_tree;
2748 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2749 continue;
2750 *pp = region->next_peer;
2751
2752 if (replace)
2753 pp_start = &replace->inner;
2754 else
2755 pp_start = &cfun->eh->region_tree;
2756 inner = region->inner;
2757 if (inner)
2758 {
2759 for (p = inner; p->next_peer ; p = p->next_peer)
2760 p->outer = replace;
2761 p->outer = replace;
2762
2763 p->next_peer = *pp_start;
2764 *pp_start = inner;
2765 }
2766
2767 if (region->type == ERT_CATCH
2768 && update_catch_try)
2769 {
2770 struct eh_region_d *eh_try, *next, *prev;
2771
2772 for (eh_try = region->next_peer;
2773 eh_try->type == ERT_CATCH;
2774 eh_try = eh_try->next_peer)
2775 continue;
2776 gcc_assert (eh_try->type == ERT_TRY);
2777
2778 next = region->u.eh_catch.next_catch;
2779 prev = region->u.eh_catch.prev_catch;
2780
2781 if (next)
2782 next->u.eh_catch.prev_catch = prev;
2783 else
2784 eh_try->u.eh_try.last_catch = prev;
2785 if (prev)
2786 prev->u.eh_catch.next_catch = next;
2787 else
2788 {
2789 eh_try->u.eh_try.eh_catch = next;
2790 if (! next)
2791 remove_eh_handler (eh_try);
2792 }
2793 }
2794 }
2795
2796 /* Splice REGION from the region tree and replace it by the outer region
2797 etc. */
2798
2799 static void
2800 remove_eh_handler (struct eh_region_d *region)
2801 {
2802 remove_eh_handler_and_replace (region, region->outer, true);
2803 }
2804
2805 /* Remove EH region R that has turned out to have no code in its handler. */
2806
2807 void
2808 remove_eh_region (int r)
2809 {
2810 struct eh_region_d *region;
2811
2812 region = VEC_index (eh_region, cfun->eh->region_array, r);
2813 remove_eh_handler (region);
2814 }
2815
2816 /* Remove EH region R that has turned out to have no code in its handler,
2817 and replace it by the outer region of R2. */
2818
2819 void
2820 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2821 {
2822 struct eh_region_d *region, *region2;
2823
2824 region = VEC_index (eh_region, cfun->eh->region_array, r);
2825 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2826 remove_eh_handler_and_replace (region, region2->outer, true);
2827 }
2828
2829 /* Invokes CALLBACK for every exception handler label. Only used by old
2830 loop hackery; should not be used by new code. */
2831
2832 void
2833 for_each_eh_label (void (*callback) (rtx))
2834 {
2835 int i;
2836 for (i = 0; i < cfun->eh->last_region_number; i++)
2837 {
2838 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2839 if (r && r->region_number == i && r->label
2840 && LABEL_P (r->label))
2841 (*callback) (r->label);
2842 }
2843 }
2844
2845 /* Invoke CALLBACK for every exception region in the current function. */
2846
2847 void
2848 for_each_eh_region (void (*callback) (struct eh_region_d *))
2849 {
2850 int i, n = cfun->eh->last_region_number;
2851 for (i = 1; i <= n; ++i)
2852 {
2853 struct eh_region_d *region;
2854
2855 region = VEC_index (eh_region, cfun->eh->region_array, i);
2856 if (region)
2857 (*callback) (region);
2858 }
2859 }
2860 \f
2861 /* This section describes CFG exception edges for flow. */
2862
2863 /* For communicating between calls to reachable_next_level. */
2864 struct reachable_info
2865 {
2866 tree types_caught;
2867 tree types_allowed;
2868 void (*callback) (struct eh_region_d *, void *);
2869 void *callback_data;
2870 };
2871
2872 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2873 base class of TYPE, is in HANDLED. */
2874
2875 static int
2876 check_handled (tree handled, tree type)
2877 {
2878 tree t;
2879
2880 /* We can check for exact matches without front-end help. */
2881 if (! lang_eh_type_covers)
2882 {
2883 for (t = handled; t ; t = TREE_CHAIN (t))
2884 if (TREE_VALUE (t) == type)
2885 return 1;
2886 }
2887 else
2888 {
2889 for (t = handled; t ; t = TREE_CHAIN (t))
2890 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2891 return 1;
2892 }
2893
2894 return 0;
2895 }
2896
2897 /* A subroutine of reachable_next_level. If we are collecting a list
2898 of handlers, add one. After landing pad generation, reference
2899 it instead of the handlers themselves. Further, the handlers are
2900 all wired together, so by referencing one, we've got them all.
2901 Before landing pad generation we reference each handler individually.
2902
2903 LP_REGION contains the landing pad; REGION is the handler. */
2904
2905 static void
2906 add_reachable_handler (struct reachable_info *info,
2907 struct eh_region_d *lp_region,
2908 struct eh_region_d *region)
2909 {
2910 if (! info)
2911 return;
2912
2913 if (crtl->eh.built_landing_pads)
2914 info->callback (lp_region, info->callback_data);
2915 else
2916 info->callback (region, info->callback_data);
2917 }
2918
2919 /* Process one level of exception regions for reachability.
2920 If TYPE_THROWN is non-null, then it is the *exact* type being
2921 propagated. If INFO is non-null, then collect handler labels
2922 and caught/allowed type information between invocations. */
2923
2924 static enum reachable_code
2925 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2926 struct reachable_info *info,
2927 bool maybe_resx)
2928 {
2929 switch (region->type)
2930 {
2931 case ERT_CLEANUP:
2932 /* Before landing-pad generation, we model control flow
2933 directly to the individual handlers. In this way we can
2934 see that catch handler types may shadow one another. */
2935 add_reachable_handler (info, region, region);
2936 return RNL_MAYBE_CAUGHT;
2937
2938 case ERT_TRY:
2939 {
2940 struct eh_region_d *c;
2941 enum reachable_code ret = RNL_NOT_CAUGHT;
2942
2943 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2944 {
2945 /* A catch-all handler ends the search. */
2946 if (c->u.eh_catch.type_list == NULL)
2947 {
2948 add_reachable_handler (info, region, c);
2949 return RNL_CAUGHT;
2950 }
2951
2952 if (type_thrown)
2953 {
2954 /* If we have at least one type match, end the search. */
2955 tree tp_node = c->u.eh_catch.type_list;
2956
2957 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2958 {
2959 tree type = TREE_VALUE (tp_node);
2960
2961 if (type == type_thrown
2962 || (lang_eh_type_covers
2963 && (*lang_eh_type_covers) (type, type_thrown)))
2964 {
2965 add_reachable_handler (info, region, c);
2966 return RNL_CAUGHT;
2967 }
2968 }
2969
2970 /* If we have definitive information of a match failure,
2971 the catch won't trigger. */
2972 if (lang_eh_type_covers)
2973 return RNL_NOT_CAUGHT;
2974 }
2975
2976 /* At this point, we either don't know what type is thrown or
2977 don't have front-end assistance to help deciding if it is
2978 covered by one of the types in the list for this region.
2979
2980 We'd then like to add this region to the list of reachable
2981 handlers since it is indeed potentially reachable based on the
2982 information we have.
2983
2984 Actually, this handler is for sure not reachable if all the
2985 types it matches have already been caught. That is, it is only
2986 potentially reachable if at least one of the types it catches
2987 has not been previously caught. */
2988
2989 if (! info)
2990 ret = RNL_MAYBE_CAUGHT;
2991 else
2992 {
2993 tree tp_node = c->u.eh_catch.type_list;
2994 bool maybe_reachable = false;
2995
2996 /* Compute the potential reachability of this handler and
2997 update the list of types caught at the same time. */
2998 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2999 {
3000 tree type = TREE_VALUE (tp_node);
3001
3002 if (! check_handled (info->types_caught, type))
3003 {
3004 info->types_caught
3005 = tree_cons (NULL, type, info->types_caught);
3006
3007 maybe_reachable = true;
3008 }
3009 }
3010
3011 if (maybe_reachable)
3012 {
3013 add_reachable_handler (info, region, c);
3014
3015 /* ??? If the catch type is a base class of every allowed
3016 type, then we know we can stop the search. */
3017 ret = RNL_MAYBE_CAUGHT;
3018 }
3019 }
3020 }
3021
3022 return ret;
3023 }
3024
3025 case ERT_ALLOWED_EXCEPTIONS:
3026 /* An empty list of types definitely ends the search. */
3027 if (region->u.allowed.type_list == NULL_TREE)
3028 {
3029 add_reachable_handler (info, region, region);
3030 return RNL_CAUGHT;
3031 }
3032
3033 /* Collect a list of lists of allowed types for use in detecting
3034 when a catch may be transformed into a catch-all. */
3035 if (info)
3036 info->types_allowed = tree_cons (NULL_TREE,
3037 region->u.allowed.type_list,
3038 info->types_allowed);
3039
3040 /* If we have definitive information about the type hierarchy,
3041 then we can tell if the thrown type will pass through the
3042 filter. */
3043 if (type_thrown && lang_eh_type_covers)
3044 {
3045 if (check_handled (region->u.allowed.type_list, type_thrown))
3046 return RNL_NOT_CAUGHT;
3047 else
3048 {
3049 add_reachable_handler (info, region, region);
3050 return RNL_CAUGHT;
3051 }
3052 }
3053
3054 add_reachable_handler (info, region, region);
3055 return RNL_MAYBE_CAUGHT;
3056
3057 case ERT_CATCH:
3058 /* Catch regions are handled by their controlling try region. */
3059 return RNL_NOT_CAUGHT;
3060
3061 case ERT_MUST_NOT_THROW:
3062 /* Here we end our search, since no exceptions may propagate.
3063
3064 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3065 only via locally handled RESX instructions.
3066
3067 When we inline a function call, we can bring in new handlers. To keep
3068 ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
3069 assume that such handlers exist for any inlinable call until
3070 inlining decisions are fixed. */
3071
3072 if (maybe_resx)
3073 {
3074 add_reachable_handler (info, region, region);
3075 return RNL_CAUGHT;
3076 }
3077 else
3078 return RNL_BLOCKED;
3079
3080 case ERT_THROW:
3081 case ERT_UNKNOWN:
3082 /* Shouldn't see these here. */
3083 gcc_unreachable ();
3084 break;
3085 default:
3086 gcc_unreachable ();
3087 }
3088 }
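
/* For illustration: with an exact thrown type A and a region nest
   cleanup -> try (catch B, catch A) -> must-not-throw, the levels
   above yield RNL_MAYBE_CAUGHT for the cleanup, RNL_CAUGHT at the
   try (assuming lang_eh_type_covers confirms the match), and would
   yield RNL_BLOCKED at the must-not-throw region had the search
   continued that far.  */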
3089
3090 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3091
3092 void
3093 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3094 void (*callback) (struct eh_region_d *, void *),
3095 void *callback_data)
3096 {
3097 struct reachable_info info;
3098 struct eh_region_d *region;
3099 tree type_thrown;
3100
3101 memset (&info, 0, sizeof (info));
3102 info.callback = callback;
3103 info.callback_data = callback_data;
3104
3105 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3106 if (!region)
3107 return;
3108
3109 type_thrown = NULL_TREE;
3110 if (is_resx)
3111 {
3112 /* A RESX leaves a region instead of entering it. Thus the
3113 region itself may have been deleted out from under us. */
3114 if (region == NULL)
3115 return;
3116 region = region->outer;
3117 }
3118 else if (region->type == ERT_THROW)
3119 {
3120 type_thrown = region->u.eh_throw.type;
3121 region = region->outer;
3122 }
3123
3124 while (region)
3125 {
3126 if (reachable_next_level (region, type_thrown, &info,
3127 inlinable_call || is_resx) >= RNL_CAUGHT)
3128 break;
3129 /* If we have processed one cleanup, there is no point in
3130 processing any more of them. Each cleanup will have an edge
3131 to the next outer cleanup region, so the flow graph will be
3132 accurate. */
3133 if (region->type == ERT_CLEANUP)
3134 {
3135 enum reachable_code code = RNL_NOT_CAUGHT;
3136 region = find_prev_try (region->outer);
3137 /* Continue looking for an outer TRY region until we find one
3138 that might catch something. */
3139 while (region
3140 && (code = reachable_next_level (region, type_thrown, &info,
3141 inlinable_call || is_resx))
3142 == RNL_NOT_CAUGHT)
3143 region = find_prev_try (region->outer);
3144 if (code >= RNL_CAUGHT)
3145 break;
3146 }
3147 if (region)
3148 region = region->outer;
3149 }
3150 }
3151
3152 /* Retrieve a list of labels of exception handlers which can be
3153 reached by a given insn. */
3154
3155 static void
3156 arh_to_landing_pad (struct eh_region_d *region, void *data)
3157 {
3158 rtx *p_handlers = (rtx *) data;
3159 if (! *p_handlers)
3160 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3161 }
3162
3163 static void
3164 arh_to_label (struct eh_region_d *region, void *data)
3165 {
3166 rtx *p_handlers = (rtx *) data;
3167 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3168 }
3169
3170 rtx
3171 reachable_handlers (rtx insn)
3172 {
3173 bool is_resx = false;
3174 rtx handlers = NULL;
3175 int region_number;
3176
3177 if (JUMP_P (insn)
3178 && GET_CODE (PATTERN (insn)) == RESX)
3179 {
3180 region_number = XINT (PATTERN (insn), 0);
3181 is_resx = true;
3182 }
3183 else
3184 {
3185 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3186 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3187 return NULL;
3188 region_number = INTVAL (XEXP (note, 0));
3189 }
3190
3191 foreach_reachable_handler (region_number, is_resx, false,
3192 (crtl->eh.built_landing_pads
3193 ? arh_to_landing_pad
3194 : arh_to_label),
3195 &handlers);
3196
3197 return handlers;
3198 }
3199
3200 /* Determine if the given INSN can throw an exception that is caught
3201 within the function. */
3202
3203 bool
3204 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3205 {
3206 struct eh_region_d *region;
3207 tree type_thrown;
3208
3209 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3210 if (!region)
3211 return false;
3212
3213 type_thrown = NULL_TREE;
3214 if (is_resx)
3215 region = region->outer;
3216 else if (region->type == ERT_THROW)
3217 {
3218 type_thrown = region->u.eh_throw.type;
3219 region = region->outer;
3220 }
3221
3222 /* If this exception is ignored by each and every containing region,
3223 then control passes straight out. The runtime may handle some
3224 regions, which also do not require processing internally. */
3225 for (; region; region = region->outer)
3226 {
3227 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3228 inlinable_call || is_resx);
3229 if (how == RNL_BLOCKED)
3230 return false;
3231 if (how != RNL_NOT_CAUGHT)
3232 return true;
3233 }
3234
3235 return false;
3236 }
3237
3238 bool
3239 can_throw_internal (const_rtx insn)
3240 {
3241 rtx note;
3242
3243 if (! INSN_P (insn))
3244 return false;
3245
3246 if (JUMP_P (insn)
3247 && GET_CODE (PATTERN (insn)) == RESX
3248 && XINT (PATTERN (insn), 0) > 0)
3249 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3250
3251 if (NONJUMP_INSN_P (insn)
3252 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3253 insn = XVECEXP (PATTERN (insn), 0, 0);
3254
3255 /* Every insn that might throw has an EH_REGION note. */
3256 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3257 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3258 return false;
3259
3260 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3261 }
3262
3263 /* Determine if the given INSN can throw an exception that is
3264 visible outside the function. */
3265
3266 bool
3267 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3268 {
3269 struct eh_region_d *region;
3270 tree type_thrown;
3271
3272 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3273 if (!region)
3274 return true;
3275
3276 type_thrown = NULL_TREE;
3277 if (is_resx)
3278 region = region->outer;
3279 else if (region->type == ERT_THROW)
3280 {
3281 type_thrown = region->u.eh_throw.type;
3282 region = region->outer;
3283 }
3284
3285 /* If the exception is caught or blocked by any containing region,
3286 then it is not seen by any calling function. */
3287 for (; region ; region = region->outer)
3288 if (reachable_next_level (region, type_thrown, NULL,
3289 inlinable_call || is_resx) >= RNL_CAUGHT)
3290 return false;
3291
3292 return true;
3293 }
3294
3295 bool
3296 can_throw_external (const_rtx insn)
3297 {
3298 rtx note;
3299
3300 if (! INSN_P (insn))
3301 return false;
3302
3303 if (JUMP_P (insn)
3304 && GET_CODE (PATTERN (insn)) == RESX
3305 && XINT (PATTERN (insn), 0) > 0)
3306 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3307
3308 if (NONJUMP_INSN_P (insn)
3309 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3310 {
3311 rtx seq = PATTERN (insn);
3312 int i, n = XVECLEN (seq, 0);
3313
3314 for (i = 0; i < n; i++)
3315 if (can_throw_external (XVECEXP (seq, 0, i)))
3316 return true;
3317
3318 return false;
3319 }
3320
3321 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3322 if (!note)
3323 {
3324 /* Calls (and trapping insns) without notes are outside any
3325 exception handling region in this function. We have to
3326 assume it might throw. Given that the front end and middle
3327 ends mark known NOTHROW functions, this isn't so wildly
3328 inaccurate. */
3329 return (CALL_P (insn)
3330 || (flag_non_call_exceptions
3331 && may_trap_p (PATTERN (insn))));
3332 }
3333 if (INTVAL (XEXP (note, 0)) <= 0)
3334 return false;
3335
3336 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3337 }
3338
3339 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3340
3341 unsigned int
3342 set_nothrow_function_flags (void)
3343 {
3344 rtx insn;
3345
3346 crtl->nothrow = 1;
3347
3348 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3349 something that can throw an exception. We specifically exempt
3350 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3351 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3352 is optimistic. */
3353
3354 crtl->all_throwers_are_sibcalls = 1;
3355
3356 /* If we don't know that this implementation of the function will
3357 actually be used, then we must not set TREE_NOTHROW, since
3358 callers must not assume that this function does not throw. */
3359 if (TREE_NOTHROW (current_function_decl))
3360 return 0;
3361
3362 if (! flag_exceptions)
3363 return 0;
3364
3365 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3366 if (can_throw_external (insn))
3367 {
3368 crtl->nothrow = 0;
3369
3370 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3371 {
3372 crtl->all_throwers_are_sibcalls = 0;
3373 return 0;
3374 }
3375 }
3376
3377 for (insn = crtl->epilogue_delay_list; insn;
3378 insn = XEXP (insn, 1))
3379 if (can_throw_external (insn))
3380 {
3381 crtl->nothrow = 0;
3382
3383 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3384 {
3385 crtl->all_throwers_are_sibcalls = 0;
3386 return 0;
3387 }
3388 }
3389 if (crtl->nothrow
3390 && (cgraph_function_body_availability (cgraph_node
3391 (current_function_decl))
3392 >= AVAIL_AVAILABLE))
3393 {
3394 struct cgraph_node *node = cgraph_node (current_function_decl);
3395 struct cgraph_edge *e;
3396 for (e = node->callers; e; e = e->next_caller)
3397 e->can_throw_external = false;
3398 TREE_NOTHROW (current_function_decl) = 1;
3399
3400 if (dump_file)
3401 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3402 current_function_name ());
3403 }
3404 return 0;
3405 }
3406
3407 struct rtl_opt_pass pass_set_nothrow_function_flags =
3408 {
3409 {
3410 RTL_PASS,
3411 "nothrow", /* name */
3412 NULL, /* gate */
3413 set_nothrow_function_flags, /* execute */
3414 NULL, /* sub */
3415 NULL, /* next */
3416 0, /* static_pass_number */
3417 TV_NONE, /* tv_id */
3418 0, /* properties_required */
3419 0, /* properties_provided */
3420 0, /* properties_destroyed */
3421 0, /* todo_flags_start */
3422 TODO_dump_func, /* todo_flags_finish */
3423 }
3424 };
3425
3426 \f
3427 /* Various hooks for unwind library. */
3428
3429 /* Do any necessary initialization to access arbitrary stack frames.
3430 On the SPARC, this means flushing the register windows. */
3431
3432 void
3433 expand_builtin_unwind_init (void)
3434 {
3435 /* Set this so all the registers get saved in our frame; we need to be
3436 able to copy the saved values for any registers from frames we unwind. */
3437 crtl->saves_all_registers = 1;
3438
3439 #ifdef SETUP_FRAME_ADDRESSES
3440 SETUP_FRAME_ADDRESSES ();
3441 #endif
3442 }
3443
3444 rtx
3445 expand_builtin_eh_return_data_regno (tree exp)
3446 {
3447 tree which = CALL_EXPR_ARG (exp, 0);
3448 unsigned HOST_WIDE_INT iwhich;
3449
3450 if (TREE_CODE (which) != INTEGER_CST)
3451 {
3452 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3453 return constm1_rtx;
3454 }
3455
3456 iwhich = tree_low_cst (which, 1);
3457 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3458 if (iwhich == INVALID_REGNUM)
3459 return constm1_rtx;
3460
3461 #ifdef DWARF_FRAME_REGNUM
3462 iwhich = DWARF_FRAME_REGNUM (iwhich);
3463 #else
3464 iwhich = DBX_REGISTER_NUMBER (iwhich);
3465 #endif
3466
3467 return GEN_INT (iwhich);
3468 }
3469
3470 /* Given a value extracted from the return address register or stack slot,
3471 return the actual address encoded in that value. */
3472
3473 rtx
3474 expand_builtin_extract_return_addr (tree addr_tree)
3475 {
3476 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3477
3478 if (GET_MODE (addr) != Pmode
3479 && GET_MODE (addr) != VOIDmode)
3480 {
3481 #ifdef POINTERS_EXTEND_UNSIGNED
3482 addr = convert_memory_address (Pmode, addr);
3483 #else
3484 addr = convert_to_mode (Pmode, addr, 0);
3485 #endif
3486 }
3487
3488 /* First mask out any unwanted bits. */
3489 #ifdef MASK_RETURN_ADDR
3490 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3491 #endif
3492
3493 /* Then adjust to find the real return address. */
3494 #if defined (RETURN_ADDR_OFFSET)
3495 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3496 #endif
3497
3498 return addr;
3499 }
3500
3501 /* Given an actual address in addr_tree, do any necessary encoding
3502 and return the value to be stored in the return address register or
3503 stack slot so the epilogue will return to that address. */
3504
3505 rtx
3506 expand_builtin_frob_return_addr (tree addr_tree)
3507 {
3508 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3509
3510 addr = convert_memory_address (Pmode, addr);
3511
3512 #ifdef RETURN_ADDR_OFFSET
3513 addr = force_reg (Pmode, addr);
3514 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3515 #endif
3516
3517 return addr;
3518 }
3519
3520 /* Set up the epilogue with the magic bits we'll need to return to the
3521 exception handler. */
3522
3523 void
3524 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3525 tree handler_tree)
3526 {
3527 rtx tmp;
3528
3529 #ifdef EH_RETURN_STACKADJ_RTX
3530 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3531 VOIDmode, EXPAND_NORMAL);
3532 tmp = convert_memory_address (Pmode, tmp);
3533 if (!crtl->eh.ehr_stackadj)
3534 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3535 else if (tmp != crtl->eh.ehr_stackadj)
3536 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3537 #endif
3538
3539 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3540 VOIDmode, EXPAND_NORMAL);
3541 tmp = convert_memory_address (Pmode, tmp);
3542 if (!crtl->eh.ehr_handler)
3543 crtl->eh.ehr_handler = copy_to_reg (tmp);
3544 else if (tmp != crtl->eh.ehr_handler)
3545 emit_move_insn (crtl->eh.ehr_handler, tmp);
3546
3547 if (!crtl->eh.ehr_label)
3548 crtl->eh.ehr_label = gen_label_rtx ();
3549 emit_jump (crtl->eh.ehr_label);
3550 }
3551
3552 void
3553 expand_eh_return (void)
3554 {
3555 rtx around_label;
3556
3557 if (! crtl->eh.ehr_label)
3558 return;
3559
3560 crtl->calls_eh_return = 1;
3561
3562 #ifdef EH_RETURN_STACKADJ_RTX
3563 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3564 #endif
3565
3566 around_label = gen_label_rtx ();
3567 emit_jump (around_label);
3568
3569 emit_label (crtl->eh.ehr_label);
3570 clobber_return_register ();
3571
3572 #ifdef EH_RETURN_STACKADJ_RTX
3573 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3574 #endif
3575
3576 #ifdef HAVE_eh_return
3577 if (HAVE_eh_return)
3578 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3579 else
3580 #endif
3581 {
3582 #ifdef EH_RETURN_HANDLER_RTX
3583 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3584 #else
3585 error ("__builtin_eh_return not supported on this target");
3586 #endif
3587 }
3588
3589 emit_label (around_label);
3590 }
3591
3592 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3593 POINTERS_EXTEND_UNSIGNED and return it. */
3594
3595 rtx
3596 expand_builtin_extend_pointer (tree addr_tree)
3597 {
3598 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3599 int extend;
3600
3601 #ifdef POINTERS_EXTEND_UNSIGNED
3602 extend = POINTERS_EXTEND_UNSIGNED;
3603 #else
3604 /* The previous EH code did an unsigned extend by default, so we do this also
3605 for consistency. */
3606 extend = 1;
3607 #endif
3608
3609 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3610 }
3611 \f
3612 /* In the following functions, we represent entries in the action table
3613 as 1-based indices. Special cases are:
3614
3615 0: null action record, non-null landing pad; implies cleanups
3616 -1: null action record, null landing pad; implies no action
3617 -2: no call-site entry; implies must_not_throw
3618 -3: we have yet to process outer regions
3619
3620 Further, no special cases apply to the "next" field of the record.
3621 For next, 0 means end of list. */
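
/* For example (values hypothetical): an insn whose handlers form the
   chain "try filter 2, else filter 1, else cleanup" ends up with a
   single 1-based action index naming the head record of that chain,
   while an insn covered only by cleanups gets action 0 and one covered
   by no region at all gets -1.  */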
3622
3623 struct action_record
3624 {
3625 int offset;
3626 int filter;
3627 int next;
3628 };
3629
3630 static int
3631 action_record_eq (const void *pentry, const void *pdata)
3632 {
3633 const struct action_record *entry = (const struct action_record *) pentry;
3634 const struct action_record *data = (const struct action_record *) pdata;
3635 return entry->filter == data->filter && entry->next == data->next;
3636 }
3637
3638 static hashval_t
3639 action_record_hash (const void *pentry)
3640 {
3641 const struct action_record *entry = (const struct action_record *) pentry;
3642 return entry->next * 1009 + entry->filter;
3643 }
3644
3645 static int
3646 add_action_record (htab_t ar_hash, int filter, int next)
3647 {
3648 struct action_record **slot, *new_ar, tmp;
3649
3650 tmp.filter = filter;
3651 tmp.next = next;
3652 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3653
3654 if ((new_ar = *slot) == NULL)
3655 {
3656 new_ar = XNEW (struct action_record);
3657 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3658 new_ar->filter = filter;
3659 new_ar->next = next;
3660 *slot = new_ar;
3661
3662 /* The filter value goes in untouched. The link to the next
3663 record is a "self-relative" byte offset, or zero to indicate
3664 that there is no next record. So convert the absolute 1-based
3665 indices we've been carrying around into a displacement. */
3666
3667 push_sleb128 (&crtl->eh.action_record_data, filter);
3668 if (next)
3669 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3670 push_sleb128 (&crtl->eh.action_record_data, next);
3671 }
3672
3673 return new_ar->offset;
3674 }
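
/* A worked example of the displacement conversion above, starting from
   an empty buffer: record (filter 1, next 0) is assigned offset 1 and
   pushes the sleb128 bytes [1, 0].  A second record (filter 2, next 1)
   is assigned offset 3; after its filter is pushed the buffer holds 3
   bytes, so its link is stored as 1 - (3 + 1) = -3, a self-relative
   reference back to the first record.  */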
3675
3676 static int
3677 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3678 {
3679 struct eh_region_d *c;
3680 int next;
3681
3682 /* If we've reached the top of the region chain, then we have
3683 no actions, and require no landing pad. */
3684 if (region == NULL)
3685 return -1;
3686
3687 switch (region->type)
3688 {
3689 case ERT_CLEANUP:
3690 /* A cleanup adds a zero filter to the beginning of the chain, but
3691 there are special cases to look out for. If there are *only*
3692 cleanups along a path, then it compresses to a zero action.
3693 Further, if there are multiple cleanups along a path, we only
3694 need to represent one of them, as that is enough to trigger
3695 entry to the landing pad at runtime. */
3696 next = collect_one_action_chain (ar_hash, region->outer);
3697 if (next <= 0)
3698 return 0;
3699 for (c = region->outer; c ; c = c->outer)
3700 if (c->type == ERT_CLEANUP)
3701 return next;
3702 return add_action_record (ar_hash, 0, next);
3703
3704 case ERT_TRY:
3705 /* Process the associated catch regions in reverse order.
3706 If there's a catch-all handler, then we don't need to
3707 search outer regions. Use a magic -3 value to record
3708 that we haven't done the outer search. */
3709 next = -3;
3710 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3711 {
3712 if (c->u.eh_catch.type_list == NULL)
3713 {
3714 /* Retrieve the filter from the head of the filter list
3715 where we have stored it (see assign_filter_values). */
3716 int filter
3717 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3718
3719 next = add_action_record (ar_hash, filter, 0);
3720 }
3721 else
3722 {
3723 /* Once the outer search is done, trigger an action record for
3724 each filter we have. */
3725 tree flt_node;
3726
3727 if (next == -3)
3728 {
3729 next = collect_one_action_chain (ar_hash, region->outer);
3730
3731 /* If there is no next action, terminate the chain. */
3732 if (next == -1)
3733 next = 0;
3734 /* If all outer actions are cleanups or must_not_throw,
3735 we'll have no action record for it, since we had wanted
3736 to encode these states in the call-site record directly.
3737 Add a cleanup action to the chain to catch these. */
3738 else if (next <= 0)
3739 next = add_action_record (ar_hash, 0, 0);
3740 }
3741
3742 flt_node = c->u.eh_catch.filter_list;
3743 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3744 {
3745 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3746 next = add_action_record (ar_hash, filter, next);
3747 }
3748 }
3749 }
3750 return next;
3751
3752 case ERT_ALLOWED_EXCEPTIONS:
3753 /* An exception specification adds its filter to the
3754 beginning of the chain. */
3755 next = collect_one_action_chain (ar_hash, region->outer);
3756
3757 /* If there is no next action, terminate the chain. */
3758 if (next == -1)
3759 next = 0;
3760 /* If all outer actions are cleanups or must_not_throw,
3761 we'll have no action record for it, since we had wanted
3762 to encode these states in the call-site record directly.
3763 Add a cleanup action to the chain to catch these. */
3764 else if (next <= 0)
3765 next = add_action_record (ar_hash, 0, 0);
3766
3767 return add_action_record (ar_hash, region->u.allowed.filter, next);
3768
3769 case ERT_MUST_NOT_THROW:
3770 /* A must-not-throw region with no inner handlers or cleanups
3771 requires no call-site entry. Note that this differs from
3772 the no handler or cleanup case in that we do require an lsda
3773 to be generated. Return a magic -2 value to record this. */
3774 return -2;
3775
3776 case ERT_CATCH:
3777 case ERT_THROW:
3778 /* CATCH regions are handled in TRY above. THROW regions are
3779 for optimization information only and produce no output. */
3780 return collect_one_action_chain (ar_hash, region->outer);
3781
3782 default:
3783 gcc_unreachable ();
3784 }
3785 }
3786
3787 static int
3788 add_call_site (rtx landing_pad, int action, int section)
3789 {
3790 call_site_record record;
3791
3792 record = GGC_NEW (struct call_site_record_d);
3793 record->landing_pad = landing_pad;
3794 record->action = action;
3795
3796 VEC_safe_push (call_site_record, gc,
3797 crtl->eh.call_site_record[section], record);
3798
3799 return call_site_base + VEC_length (call_site_record,
3800 crtl->eh.call_site_record[section]) - 1;
3801 }
3802
3803 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3804 The new note numbers will not refer to region numbers, but
3805 instead to call site entries. */
3806
3807 unsigned int
3808 convert_to_eh_region_ranges (void)
3809 {
3810 rtx insn, iter, note;
3811 htab_t ar_hash;
3812 int last_action = -3;
3813 rtx last_action_insn = NULL_RTX;
3814 rtx last_landing_pad = NULL_RTX;
3815 rtx first_no_action_insn = NULL_RTX;
3816 int call_site = 0;
3817 int cur_sec = 0;
3818 rtx section_switch_note = NULL_RTX;
3819 rtx first_no_action_insn_before_switch = NULL_RTX;
3820 rtx last_no_action_insn_before_switch = NULL_RTX;
3821 rtx *pad_map = NULL;
3822 sbitmap pad_loc = NULL;
3823 int min_labelno = 0, max_labelno = 0;
3824 int saved_call_site_base = call_site_base;
3825
3826 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3827 return 0;
3828
3829 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3830
3831 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3832
3833 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3834 if (INSN_P (iter))
3835 {
3836 struct eh_region_d *region;
3837 int this_action;
3838 rtx this_landing_pad;
3839
3840 insn = iter;
3841 if (NONJUMP_INSN_P (insn)
3842 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3843 insn = XVECEXP (PATTERN (insn), 0, 0);
3844
3845 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3846 if (!note)
3847 {
3848 if (! (CALL_P (insn)
3849 || (flag_non_call_exceptions
3850 && may_trap_p (PATTERN (insn)))))
3851 continue;
3852 this_action = -1;
3853 region = NULL;
3854 }
3855 else
3856 {
3857 if (INTVAL (XEXP (note, 0)) <= 0)
3858 continue;
3859 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3860 this_action = collect_one_action_chain (ar_hash, region);
3861 }
3862
3863 	/* The existence of catch handlers or must-not-throw regions
3864 	   implies that an lsda is needed (even if empty).  */
3865 if (this_action != -1)
3866 crtl->uses_eh_lsda = 1;
3867
3868 /* Delay creation of region notes for no-action regions
3869 until we're sure that an lsda will be required. */
3870 else if (last_action == -3)
3871 {
3872 first_no_action_insn = iter;
3873 last_action = -1;
3874 }
3875
3876 /* Cleanups and handlers may share action chains but not
3877 landing pads. Collect the landing pad for this region. */
3878 if (this_action >= 0)
3879 {
3880 struct eh_region_d *o;
3881 for (o = region; ! o->landing_pad ; o = o->outer)
3882 continue;
3883 this_landing_pad = o->landing_pad;
3884 }
3885 else
3886 this_landing_pad = NULL_RTX;
3887
3888 	/* Differing actions or landing pads imply a change in call-site
3889 	   info, which implies some EH_REGION note should be emitted.  */
3890 if (last_action != this_action
3891 || last_landing_pad != this_landing_pad)
3892 {
3893 /* If we'd not seen a previous action (-3) or the previous
3894 action was must-not-throw (-2), then we do not need an
3895 end note. */
3896 if (last_action >= -1)
3897 {
3898 /* If we delayed the creation of the begin, do it now. */
3899 if (first_no_action_insn_before_switch)
3900 {
3901 call_site = add_call_site (NULL_RTX, 0, 0);
3902 note
3903 = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3904 first_no_action_insn_before_switch);
3905 NOTE_EH_HANDLER (note) = call_site;
3906 if (first_no_action_insn)
3907 {
3908 note
3909 = emit_note_after (NOTE_INSN_EH_REGION_END,
3910 last_no_action_insn_before_switch);
3911 NOTE_EH_HANDLER (note) = call_site;
3912 }
3913 else
3914 gcc_assert (last_action_insn
3915 == last_no_action_insn_before_switch);
3916 }
3917 if (first_no_action_insn)
3918 {
3919 call_site = add_call_site (NULL_RTX, 0, cur_sec);
3920 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3921 first_no_action_insn);
3922 NOTE_EH_HANDLER (note) = call_site;
3923 first_no_action_insn = NULL_RTX;
3924 }
3925
3926 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3927 last_action_insn);
3928 NOTE_EH_HANDLER (note) = call_site;
3929 }
3930
3931 /* If the new action is must-not-throw, then no region notes
3932 are created. */
3933 if (this_action >= -1)
3934 {
3935 call_site = add_call_site (this_landing_pad,
3936 this_action < 0 ? 0 : this_action,
3937 cur_sec);
3938 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3939 NOTE_EH_HANDLER (note) = call_site;
3940 }
3941
3942 last_action = this_action;
3943 last_landing_pad = this_landing_pad;
3944 }
3945 last_action_insn = iter;
3946 }
3947 else if (NOTE_P (iter)
3948 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
3949 {
3950 gcc_assert (section_switch_note == NULL_RTX);
3951 gcc_assert (flag_reorder_blocks_and_partition);
3952 section_switch_note = iter;
3953 if (first_no_action_insn)
3954 {
3955 first_no_action_insn_before_switch = first_no_action_insn;
3956 last_no_action_insn_before_switch = last_action_insn;
3957 first_no_action_insn = NULL_RTX;
3958 gcc_assert (last_action == -1);
3959 last_action = -3;
3960 }
3961 /* Force closing of current EH region before section switch and
3962 opening a new one afterwards. */
3963 else if (last_action != -3)
3964 last_landing_pad = pc_rtx;
3965 call_site_base += VEC_length (call_site_record,
3966 crtl->eh.call_site_record[cur_sec]);
3967 cur_sec++;
3968 gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
3969 crtl->eh.call_site_record[cur_sec]
3970 = VEC_alloc (call_site_record, gc, 10);
3971 max_labelno = max_label_num ();
3972 min_labelno = get_first_label_num ();
3973 pad_map = XCNEWVEC (rtx, max_labelno - min_labelno + 1);
3974 pad_loc = sbitmap_alloc (max_labelno - min_labelno + 1);
3975 }
3976 else if (LABEL_P (iter) && pad_map)
3977 SET_BIT (pad_loc, CODE_LABEL_NUMBER (iter) - min_labelno);
3978
3979 if (last_action >= -1 && ! first_no_action_insn)
3980 {
3981 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3982 NOTE_EH_HANDLER (note) = call_site;
3983 }
3984
3985 call_site_base = saved_call_site_base;
3986
3987 if (pad_map)
3988 {
3989 /* When doing hot/cold partitioning, ensure landing pads are
3990 	 always in the same section as the EH region; .gcc_except_table
3991 can't express it otherwise. */
3992 for (cur_sec = 0; cur_sec < 2; cur_sec++)
3993 {
3994 int i, idx;
3995 int n = VEC_length (call_site_record,
3996 crtl->eh.call_site_record[cur_sec]);
3997 basic_block prev_bb = NULL, padbb;
3998
3999 for (i = 0; i < n; ++i)
4000 {
4001 struct call_site_record_d *cs =
4002 VEC_index (call_site_record,
4003 crtl->eh.call_site_record[cur_sec], i);
4004 rtx jump, note;
4005
4006 if (cs->landing_pad == NULL_RTX)
4007 continue;
4008 idx = CODE_LABEL_NUMBER (cs->landing_pad) - min_labelno;
4009 /* If the landing pad is in the correct section, nothing
4010 is needed. */
4011 if (TEST_BIT (pad_loc, idx) ^ (cur_sec == 0))
4012 continue;
4013 /* Otherwise, if we haven't seen this pad yet, we need to
4014 	       add a new label and a jump to the correct section.  */
4015 if (pad_map[idx] == NULL_RTX)
4016 {
4017 pad_map[idx] = gen_label_rtx ();
4018 if (prev_bb == NULL)
4019 for (iter = section_switch_note;
4020 iter; iter = PREV_INSN (iter))
4021 if (NOTE_INSN_BASIC_BLOCK_P (iter))
4022 {
4023 prev_bb = NOTE_BASIC_BLOCK (iter);
4024 break;
4025 }
4026 if (cur_sec == 0)
4027 {
4028 note = emit_label_before (pad_map[idx],
4029 section_switch_note);
4030 jump = emit_jump_insn_before (gen_jump (cs->landing_pad),
4031 section_switch_note);
4032 }
4033 else
4034 {
4035 jump = emit_jump_insn_after (gen_jump (cs->landing_pad),
4036 section_switch_note);
4037 note = emit_label_after (pad_map[idx],
4038 section_switch_note);
4039 }
4040 JUMP_LABEL (jump) = cs->landing_pad;
4041 add_reg_note (jump, REG_CROSSING_JUMP, NULL_RTX);
4042 iter = NEXT_INSN (cs->landing_pad);
4043 if (iter && NOTE_INSN_BASIC_BLOCK_P (iter))
4044 padbb = NOTE_BASIC_BLOCK (iter);
4045 else
4046 padbb = NULL;
4047 if (padbb && prev_bb
4048 && BB_PARTITION (padbb) != BB_UNPARTITIONED)
4049 {
4050 basic_block bb;
4051 int part
4052 = BB_PARTITION (padbb) == BB_COLD_PARTITION
4053 ? BB_HOT_PARTITION : BB_COLD_PARTITION;
4054 edge_iterator ei;
4055 edge e;
4056
4057 bb = create_basic_block (note, jump, prev_bb);
4058 make_single_succ_edge (bb, padbb, EDGE_CROSSING);
4059 BB_SET_PARTITION (bb, part);
4060 for (ei = ei_start (padbb->preds);
4061 (e = ei_safe_edge (ei)); )
4062 {
4063 if ((e->flags & (EDGE_EH|EDGE_CROSSING))
4064 == (EDGE_EH|EDGE_CROSSING))
4065 {
4066 redirect_edge_succ (e, bb);
4067 e->flags &= ~EDGE_CROSSING;
4068 }
4069 else
4070 ei_next (&ei);
4071 }
4072 if (cur_sec == 0)
4073 prev_bb = bb;
4074 }
4075 }
4076 cs->landing_pad = pad_map[idx];
4077 }
4078 }
4079
4080 sbitmap_free (pad_loc);
4081 XDELETEVEC (pad_map);
4082 }
4083
4084 htab_delete (ar_hash);
4085 return 0;
4086 }
4087
4088 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
4089 {
4090 {
4091 RTL_PASS,
4092 "eh_ranges", /* name */
4093 NULL, /* gate */
4094 convert_to_eh_region_ranges, /* execute */
4095 NULL, /* sub */
4096 NULL, /* next */
4097 0, /* static_pass_number */
4098 TV_NONE, /* tv_id */
4099 0, /* properties_required */
4100 0, /* properties_provided */
4101 0, /* properties_destroyed */
4102 0, /* todo_flags_start */
4103 TODO_dump_func, /* todo_flags_finish */
4104 }
4105 };
4106
4107 \f
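/* Append VALUE to *DATA_AREA as an unsigned LEB128 number: seven data
   bits per byte, least significant group first, with the high bit set
   on every byte except the last.  For example, 624485 (0x98765) is
   emitted as the three bytes 0xe5 0x8e 0x26.  */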
4108 static void
4109 push_uleb128 (varray_type *data_area, unsigned int value)
4110 {
4111 do
4112 {
4113 unsigned char byte = value & 0x7f;
4114 value >>= 7;
4115 if (value)
4116 byte |= 0x80;
4117 VARRAY_PUSH_UCHAR (*data_area, byte);
4118 }
4119 while (value);
4120 }
4121
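/* Likewise for a signed LEB128 number.  Emission stops once the
   remaining bits are all copies of the sign bit of the last byte
   emitted; note this relies on >> acting as an arithmetic shift for
   negative values.  For example, -123456 is emitted as the three
   bytes 0xc0 0xbb 0x78.  */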
4122 static void
4123 push_sleb128 (varray_type *data_area, int value)
4124 {
4125 unsigned char byte;
4126 int more;
4127
4128 do
4129 {
4130 byte = value & 0x7f;
4131 value >>= 7;
4132 more = ! ((value == 0 && (byte & 0x40) == 0)
4133 || (value == -1 && (byte & 0x40) != 0));
4134 if (more)
4135 byte |= 0x80;
4136 VARRAY_PUSH_UCHAR (*data_area, byte);
4137 }
4138 while (more);
4139 }
4140
4141 \f
4142 #ifndef HAVE_AS_LEB128
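/* Size in bytes of the dw2 call-site table for SECTION, needed when
   the assembler cannot compute leb128 deltas for us: each record is a
   4-byte region start, a 4-byte region length and a 4-byte landing
   pad (DW_EH_PE_udata4), plus a uleb128 action offset.  */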
4143 static int
4144 dw2_size_of_call_site_table (int section)
4145 {
4146 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
4147 int size = n * (4 + 4 + 4);
4148 int i;
4149
4150 for (i = 0; i < n; ++i)
4151 {
4152 struct call_site_record_d *cs =
4153 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
4154 size += size_of_uleb128 (cs->action);
4155 }
4156
4157 return size;
4158 }
4159
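/* Likewise for the sjlj call-site table, where each record is just a
   uleb128 constant (the landing_pad field holds an integer rather
   than a label) followed by a uleb128 action offset.  */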
4160 static int
4161 sjlj_size_of_call_site_table (void)
4162 {
4163 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
4164 int size = 0;
4165 int i;
4166
4167 for (i = 0; i < n; ++i)
4168 {
4169 struct call_site_record_d *cs =
4170 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
4171 size += size_of_uleb128 (INTVAL (cs->landing_pad));
4172 size += size_of_uleb128 (cs->action);
4173 }
4174
4175 return size;
4176 }
4177 #endif
4178
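/* Emit the dw2 call-site table for SECTION in the format CS_FORMAT.
   Region starts and lengths are emitted as deltas from the function
   or section begin label; a zero landing pad value means the region
   has no handler.  */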
4179 static void
4180 dw2_output_call_site_table (int cs_format, int section)
4181 {
4182 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
4183 int i;
4184 const char *begin;
4185
4186 if (section == 0)
4187 begin = current_function_func_begin_label;
4188 else if (first_function_block_is_cold)
4189 begin = crtl->subsections.hot_section_label;
4190 else
4191 begin = crtl->subsections.cold_section_label;
4192
4193 for (i = 0; i < n; ++i)
4194 {
4195 struct call_site_record_d *cs =
4196 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
4197 char reg_start_lab[32];
4198 char reg_end_lab[32];
4199 char landing_pad_lab[32];
4200
4201 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4202 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4203
4204 if (cs->landing_pad)
4205 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4206 CODE_LABEL_NUMBER (cs->landing_pad));
4207
4208 /* ??? Perhaps use insn length scaling if the assembler supports
4209 generic arithmetic. */
4210 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4211 data4 if the function is small enough. */
4212 if (cs_format == DW_EH_PE_uleb128)
4213 {
4214 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
4215 "region %d start", i);
4216 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4217 "length");
4218 if (cs->landing_pad)
4219 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
4220 "landing pad");
4221 else
4222 dw2_asm_output_data_uleb128 (0, "landing pad");
4223 }
4224 else
4225 {
4226 dw2_asm_output_delta (4, reg_start_lab, begin,
4227 "region %d start", i);
4228 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4229 if (cs->landing_pad)
4230 dw2_asm_output_delta (4, landing_pad_lab, begin,
4231 "landing pad");
4232 else
4233 dw2_asm_output_data (4, 0, "landing pad");
4234 }
4235 dw2_asm_output_data_uleb128 (cs->action, "action");
4236 }
4237
4238 call_site_base += n;
4239 }
4240
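/* Emit the sjlj call-site table: one uleb128 pair per call site.  */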
4241 static void
4242 sjlj_output_call_site_table (void)
4243 {
4244 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
4245 int i;
4246
4247 for (i = 0; i < n; ++i)
4248 {
4249 struct call_site_record_d *cs =
4250 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
4251
4252 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4253 "region %d landing pad", i);
4254 dw2_asm_output_data_uleb128 (cs->action, "action");
4255 }
4256
4257 call_site_base += n;
4258 }
4259
4260 #ifndef TARGET_UNWIND_INFO
4261 /* Switch to the section that should be used for exception tables. */
4262
4263 static void
4264 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4265 {
4266 section *s;
4267
4268 if (exception_section)
4269 s = exception_section;
4270 else
4271 {
4272 /* Compute the section and cache it into exception_section,
4273 unless it depends on the function name. */
4274 if (targetm.have_named_sections)
4275 {
4276 int flags;
4277
4278 if (EH_TABLES_CAN_BE_READ_ONLY)
4279 {
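	      /* The table can live in a read-only section unless PIC
		 would then need runtime relocations in it, i.e. when
		 type references use an absolute or aligned encoding.  */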
4280 int tt_format =
4281 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4282 flags = ((! flag_pic
4283 || ((tt_format & 0x70) != DW_EH_PE_absptr
4284 && (tt_format & 0x70) != DW_EH_PE_aligned))
4285 ? 0 : SECTION_WRITE);
4286 }
4287 else
4288 flags = SECTION_WRITE;
4289
4290 #ifdef HAVE_LD_EH_GC_SECTIONS
4291 if (flag_function_sections)
4292 {
4293 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4294 sprintf (section_name, ".gcc_except_table.%s", fnname);
4295 s = get_section (section_name, flags, NULL);
4296 free (section_name);
4297 }
4298 else
4299 #endif
4300 exception_section
4301 = s = get_section (".gcc_except_table", flags, NULL);
4302 }
4303 else
4304 exception_section
4305 = s = flag_pic ? data_section : readonly_data_section;
4306 }
4307
4308 switch_to_section (s);
4309 }
4310 #endif
4311
4312
4313 /* Output a reference from an exception table to the type_info object TYPE.
4314 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4315 the value. */
4316
4317 static void
4318 output_ttype (tree type, int tt_format, int tt_format_size)
4319 {
4320 rtx value;
4321 bool is_public = true;
4322
4323 if (type == NULL_TREE)
4324 value = const0_rtx;
4325 else
4326 {
4327 struct varpool_node *node;
4328
4329 type = lookup_type_for_runtime (type);
4330 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4331
4332 /* Let cgraph know that the rtti decl is used. Not all of the
4333 paths below go through assemble_integer, which would take
4334 care of this for us. */
4335 STRIP_NOPS (type);
4336 if (TREE_CODE (type) == ADDR_EXPR)
4337 {
4338 type = TREE_OPERAND (type, 0);
4339 if (TREE_CODE (type) == VAR_DECL)
4340 {
4341 node = varpool_node (type);
4342 if (node)
4343 varpool_mark_needed_node (node);
4344 is_public = TREE_PUBLIC (type);
4345 }
4346 }
4347 else
4348 gcc_assert (TREE_CODE (type) == INTEGER_CST);
4349 }
4350
4351 /* Allow the target to override the type table entry format. */
4352 if (targetm.asm_out.ttype (value))
4353 return;
4354
4355 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4356 assemble_integer (value, tt_format_size,
4357 tt_format_size * BITS_PER_UNIT, 1);
4358 else
4359 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
4360 }
4361
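/* Emit the exception table for the current function into SECTION:
   0 for the normal (hot) section, 1 for the cold section when the
   function is partitioned.  FNNAME is only used when switching to a
   named exception section.  */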
4362 static void
4363 output_one_function_exception_table (const char * ARG_UNUSED (fnname),
4364 int section)
4365 {
4366 int tt_format, cs_format, lp_format, i, n;
4367 #ifdef HAVE_AS_LEB128
4368 char ttype_label[32];
4369 char cs_after_size_label[32];
4370 char cs_end_label[32];
4371 #else
4372 int call_site_len;
4373 #endif
4374 int have_tt_data;
4375 int tt_format_size = 0;
4376
4377 #ifdef TARGET_UNWIND_INFO
4378 /* TODO: Move this into target file. */
4379 fputs ("\t.personality\t", asm_out_file);
4380 output_addr_const (asm_out_file, eh_personality_libfunc);
4381 fputs ("\n\t.handlerdata\n", asm_out_file);
4382 /* Note that varasm still thinks we're in the function's code section.
4383 The ".endp" directive that will immediately follow will take us back. */
4384 #else
4385 switch_to_exception_section (fnname);
4386 #endif
4387
4388 /* If the target wants a label to begin the table, emit it here. */
4389 targetm.asm_out.except_table_label (asm_out_file);
4390
4391 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4392 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4393
4394 /* Indicate the format of the @TType entries. */
4395 if (! have_tt_data)
4396 tt_format = DW_EH_PE_omit;
4397 else
4398 {
4399 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4400 #ifdef HAVE_AS_LEB128
4401 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
4402 section ? "LLSDATTC" : "LLSDATT",
4403 current_function_funcdef_no);
4404 #endif
4405 tt_format_size = size_of_encoded_value (tt_format);
4406
4407 assemble_align (tt_format_size * BITS_PER_UNIT);
4408 }
4409
4410 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
4411 current_function_funcdef_no);
4412
4413 /* The LSDA header. */
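  /* For reference, the layout emitted from here on is:

	@LPStart format byte	(DW_EH_PE_omit below, so no @LPStart field)
	@TType format byte
	@TType base offset	(uleb128; only if there is @TType data)
	call-site format byte
	call-site table length	(uleb128)
	call-site table
	action record table
	padding and @TType entries (emitted in reverse order)
	exception specification table  */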
4414
4415 /* Indicate the format of the landing pad start pointer. An omitted
4416 field implies @LPStart == @Start. */
4417 /* Currently we always put @LPStart == @Start. This field would
4418 be most useful in moving the landing pads completely out of
4419 line to another section, but it could also be used to minimize
4420 the size of uleb128 landing pad offsets. */
4421 lp_format = DW_EH_PE_omit;
4422 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4423 eh_data_format_name (lp_format));
4424
4425 /* @LPStart pointer would go here. */
4426
4427 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4428 eh_data_format_name (tt_format));
4429
4430 #ifndef HAVE_AS_LEB128
4431 if (USING_SJLJ_EXCEPTIONS)
4432 call_site_len = sjlj_size_of_call_site_table ();
4433 else
4434 call_site_len = dw2_size_of_call_site_table (section);
4435 #endif
4436
4437   /* A uleb128 displacement to the base of the @TType data.  */
4438 if (have_tt_data)
4439 {
4440 #ifdef HAVE_AS_LEB128
4441 char ttype_after_disp_label[32];
4442 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
4443 section ? "LLSDATTDC" : "LLSDATTD",
4444 current_function_funcdef_no);
4445 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4446 "@TType base offset");
4447 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4448 #else
4449       /* Ugh.  Alignment complicates things; see the note below.  */
4450 unsigned int before_disp, after_disp, last_disp, disp;
4451
4452 before_disp = 1 + 1;
4453 after_disp = (1 + size_of_uleb128 (call_site_len)
4454 + call_site_len
4455 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4456 + (VEC_length (tree, crtl->eh.ttype_data)
4457 * tt_format_size));
4458
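      /* before_disp counts the two format bytes already emitted;
	 after_disp counts everything from the call-site format byte
	 through the @TType entries.  The uleb128 encoding of DISP
	 changes size with its value, and DISP must cover the
	 alignment padding that its own size influences, so iterate
	 until the value reproduces itself.  The padding computed on
	 each pass depends only on the encoded size of DISP, so the
	 loop terminates once that size stabilizes: e.g. with
	 after_disp = 124 and tt_format_size = 4, pass one computes
	 pad = 1 and disp = 125, and pass two reproduces it.  */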
4459 disp = after_disp;
4460 do
4461 {
4462 unsigned int disp_size, pad;
4463
4464 last_disp = disp;
4465 disp_size = size_of_uleb128 (disp);
4466 pad = before_disp + disp_size + after_disp;
4467 if (pad % tt_format_size)
4468 pad = tt_format_size - (pad % tt_format_size);
4469 else
4470 pad = 0;
4471 disp = after_disp + pad;
4472 }
4473 while (disp != last_disp);
4474
4475 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4476 #endif
4477 }
4478
4479 /* Indicate the format of the call-site offsets. */
4480 #ifdef HAVE_AS_LEB128
4481 cs_format = DW_EH_PE_uleb128;
4482 #else
4483 cs_format = DW_EH_PE_udata4;
4484 #endif
4485 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4486 eh_data_format_name (cs_format));
4487
4488 #ifdef HAVE_AS_LEB128
4489 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
4490 section ? "LLSDACSBC" : "LLSDACSB",
4491 current_function_funcdef_no);
4492 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
4493 section ? "LLSDACSEC" : "LLSDACSE",
4494 current_function_funcdef_no);
4495 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4496 "Call-site table length");
4497 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4498 if (USING_SJLJ_EXCEPTIONS)
4499 sjlj_output_call_site_table ();
4500 else
4501 dw2_output_call_site_table (cs_format, section);
4502 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4503 #else
4504 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
4505 if (USING_SJLJ_EXCEPTIONS)
4506 sjlj_output_call_site_table ();
4507 else
4508 dw2_output_call_site_table (cs_format, section);
4509 #endif
4510
4511 /* ??? Decode and interpret the data for flag_debug_asm. */
4512 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4513 for (i = 0; i < n; ++i)
4514 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4515 (i ? NULL : "Action record table"));
4516
4517 if (have_tt_data)
4518 assemble_align (tt_format_size * BITS_PER_UNIT);
4519
4520 i = VEC_length (tree, crtl->eh.ttype_data);
4521 while (i-- > 0)
4522 {
4523 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4524 output_ttype (type, tt_format, tt_format_size);
4525 }
4526
4527 #ifdef HAVE_AS_LEB128
4528 if (have_tt_data)
4529 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4530 #endif
4531
4532 /* ??? Decode and interpret the data for flag_debug_asm. */
4533 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4534 for (i = 0; i < n; ++i)
4535 {
4536 if (targetm.arm_eabi_unwinder)
4537 {
4538 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4539 output_ttype (type, tt_format, tt_format_size);
4540 }
4541 else
4542 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4543 (i ? NULL : "Exception specification table"));
4544 }
4545 }
4546
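/* Emit the exception table(s) for the current function: one for the
   main text section and, when the function is split into hot and cold
   parts, a second one for the cold section.  */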
4547 void
4548 output_function_exception_table (const char * ARG_UNUSED (fnname))
4549 {
4550   /* Not all functions need an exception table.  */
4551 if (! crtl->uses_eh_lsda)
4552 return;
4553
4554 if (eh_personality_libfunc)
4555 assemble_external_libcall (eh_personality_libfunc);
4556
4557 output_one_function_exception_table (fnname, 0);
4558 if (crtl->eh.call_site_record[1] != NULL)
4559 output_one_function_exception_table (fnname, 1);
4560
4561 switch_to_section (current_function_section ());
4562 }
4563
4564 void
4565 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4566 {
4567 fun->eh->throw_stmt_table = table;
4568 }
4569
4570 htab_t
4571 get_eh_throw_stmt_table (struct function *fun)
4572 {
4573 return fun->eh->throw_stmt_table;
4574 }
4575
4576 /* Dump EH information to OUT. */
4577
4578 void
4579 dump_eh_tree (FILE * out, struct function *fun)
4580 {
4581 struct eh_region_d *i;
4582 int depth = 0;
4583   static const char *const type_name[]
4584     = { "unknown", "cleanup", "try", "catch", "allowed_exceptions",
4585 	"must_not_throw", "throw" };
4587
4588 i = fun->eh->region_tree;
4589 if (!i)
4590 return;
4591
4592   fprintf (out, "EH tree:\n");
4593 while (1)
4594 {
4595 fprintf (out, " %*s %i %s", depth * 2, "",
4596 i->region_number, type_name[(int) i->type]);
4597 if (i->tree_label)
4598 {
4599 fprintf (out, " tree_label:");
4600 print_generic_expr (out, i->tree_label, 0);
4601 }
4602 if (i->label)
4603 fprintf (out, " label:%i", INSN_UID (i->label));
4604 if (i->landing_pad)
4605 {
4606 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
4607 if (NOTE_P (i->landing_pad))
4608 fprintf (out, " (deleted)");
4609 }
4610 if (i->post_landing_pad)
4611 {
4612 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4613 if (NOTE_P (i->post_landing_pad))
4614 fprintf (out, " (deleted)");
4615 }
4616 if (i->resume)
4617 {
4618 rtx resume_list = i->resume;
4619 fprintf (out, " resume:");
4620 while (GET_CODE (resume_list) == INSN_LIST)
4621 {
4622 fprintf (out, "%i,", INSN_UID (XEXP (resume_list, 0)));
4623 if (NOTE_P (XEXP (resume_list, 0)))
4624 fprintf (out, " (deleted)");
4625 resume_list = XEXP (resume_list, 1);
4626 }
4627 	  if (GET_CODE (i->resume) != INSN_LIST)
4628 	    fprintf (out, "%i%s", INSN_UID (i->resume),
4629 		     NOTE_P (i->resume) ? " (deleted)" : "");
4630 }
4631 if (i->may_contain_throw)
4632 fprintf (out, " may_contain_throw");
4633 switch (i->type)
4634 {
4635 case ERT_CLEANUP:
4636 break;
4637
4638 case ERT_TRY:
4639 {
4640 struct eh_region_d *c;
4641 fprintf (out, " catch regions:");
4642 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4643 fprintf (out, " %i", c->region_number);
4644 }
4645 break;
4646
4647 case ERT_CATCH:
4648 if (i->u.eh_catch.prev_catch)
4649 fprintf (out, " prev: %i",
4650 i->u.eh_catch.prev_catch->region_number);
4651 if (i->u.eh_catch.next_catch)
4652 	    fprintf (out, " next: %i",
4653 i->u.eh_catch.next_catch->region_number);
4654 fprintf (out, " type:");
4655 print_generic_expr (out, i->u.eh_catch.type_list, 0);
4656 break;
4657
4658 case ERT_ALLOWED_EXCEPTIONS:
4659 	  fprintf (out, " filter: %i types:", i->u.allowed.filter);
4660 print_generic_expr (out, i->u.allowed.type_list, 0);
4661 break;
4662
4663 case ERT_THROW:
4664 fprintf (out, " type:");
4665 print_generic_expr (out, i->u.eh_throw.type, 0);
4666 break;
4667
4668 case ERT_MUST_NOT_THROW:
4669 break;
4670
4671 case ERT_UNKNOWN:
4672 break;
4673 }
4674 if (i->aka)
4675 {
4676 fprintf (out, " also known as:");
4677 dump_bitmap (out, i->aka);
4678 }
4679 else
4680 fprintf (out, "\n");
4681 /* If there are sub-regions, process them. */
4682 if (i->inner)
4683 i = i->inner, depth++;
4684 /* If there are peers, process them. */
4685 else if (i->next_peer)
4686 i = i->next_peer;
4687 /* Otherwise, step back up the tree to the next peer. */
4688 else
4689 {
4690 do
4691 {
4692 i = i->outer;
4693 depth--;
4694 if (i == NULL)
4695 return;
4696 }
4697 while (i->next_peer == NULL);
4698 i = i->next_peer;
4699 }
4700 }
4701 }
4702
4703 /* Dump the EH tree for FN on stderr. */
4704
4705 void
4706 debug_eh_tree (struct function *fn)
4707 {
4708 dump_eh_tree (stderr, fn);
4709 }
4710
4711
4712 /* Verify EH region invariants. */
4713
4714 static bool
4715 verify_eh_region (struct eh_region_d *region)
4716 {
4717 bool found = false;
4718 if (!region)
4719 return false;
4720 switch (region->type)
4721 {
4722 case ERT_TRY:
4723 {
4724 struct eh_region_d *c, *prev = NULL;
4725 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4726 {
4727 	    error ("Try region %i has wrong eh_catch pointer to %i",
4728 region->region_number,
4729 region->u.eh_try.eh_catch->region_number);
4730 found = true;
4731 }
4732 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4733 {
4734 if (c->outer != region->outer)
4735 {
4736 error
4737 ("Catch region %i has different outer region than try region %i",
4738 c->region_number, region->region_number);
4739 found = true;
4740 }
4741 if (c->u.eh_catch.prev_catch != prev)
4742 {
4743 error ("Catch region %i has corrupted catchlist",
4744 c->region_number);
4745 found = true;
4746 }
4747 prev = c;
4748 }
4749 if (prev != region->u.eh_try.last_catch)
4750 {
4751 error
4752 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4753 region->region_number,
4754 region->u.eh_try.last_catch->region_number,
4755 prev->region_number);
4756 found = true;
4757 }
4758 }
4759 break;
4760 case ERT_CATCH:
4761 if (!region->u.eh_catch.prev_catch
4762 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4763 {
4764 error ("Catch region %i should be followed by try", region->region_number);
4765 found = true;
4766 }
4767 break;
4768 case ERT_CLEANUP:
4769 case ERT_ALLOWED_EXCEPTIONS:
4770 case ERT_MUST_NOT_THROW:
4771 case ERT_THROW:
4772 break;
4773 case ERT_UNKNOWN:
4774 gcc_unreachable ();
4775 }
4776 for (region = region->inner; region; region = region->next_peer)
4777 found |= verify_eh_region (region);
4778 return found;
4779 }
4780
4781 /* Verify invariants on EH data structures.  */
4782
4783 void
4784 verify_eh_tree (struct function *fun)
4785 {
4786 struct eh_region_d *i, *outer = NULL;
4787 bool err = false;
4788 int nvisited = 0;
4789 int count = 0;
4790 int j;
4791 int depth = 0;
4792
4793 if (!fun->eh->region_tree)
4794 return;
4795 for (j = fun->eh->last_region_number; j > 0; --j)
4796 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4797 {
4798 if (i->region_number == j)
4799 count++;
4800 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4801 {
4802 error ("region_array is corrupted for region %i",
4803 i->region_number);
4804 err = true;
4805 }
4806 }
4807 i = fun->eh->region_tree;
4808
4809 while (1)
4810 {
4811 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4812 {
4813 error ("region_array is corrupted for region %i", i->region_number);
4814 err = true;
4815 }
4816 if (i->outer != outer)
4817 {
4818 error ("outer block of region %i is wrong", i->region_number);
4819 err = true;
4820 }
4821 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4822 {
4823 error
4824 ("region %i may contain throw and is contained in region that may not",
4825 i->region_number);
4826 err = true;
4827 }
4828 if (depth < 0)
4829 {
4830 error ("negative nesting depth of region %i", i->region_number);
4831 err = true;
4832 }
4833 nvisited++;
4834 /* If there are sub-regions, process them. */
4835 if (i->inner)
4836 outer = i, i = i->inner, depth++;
4837 /* If there are peers, process them. */
4838 else if (i->next_peer)
4839 i = i->next_peer;
4840 /* Otherwise, step back up the tree to the next peer. */
4841 else
4842 {
4843 do
4844 {
4845 i = i->outer;
4846 depth--;
4847 if (i == NULL)
4848 {
4849 if (depth != -1)
4850 {
4851 		      error ("tree list ends at depth %i", depth + 1);
4852 err = true;
4853 }
4854 if (count != nvisited)
4855 {
4856 error ("array does not match the region tree");
4857 err = true;
4858 }
4859 if (!err)
4860 for (i = fun->eh->region_tree; i; i = i->next_peer)
4861 err |= verify_eh_region (i);
4862
4863 if (err)
4864 {
4865 dump_eh_tree (stderr, fun);
4866 internal_error ("verify_eh_tree failed");
4867 }
4868 return;
4869 }
4870 outer = i->outer;
4871 }
4872 while (i->next_peer == NULL);
4873 i = i->next_peer;
4874 }
4875 }
4876 }
4877
4878 /* Initialize unwind_resume_libfunc. */
4879
4880 void
4881 default_init_unwind_resume_libfunc (void)
4882 {
4883   /* The default C++ routines aren't actually C++ specific, so use those.  */
4884 unwind_resume_libfunc =
4885     init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4886 					     : "_Unwind_Resume");
4887 }
4888
4889 \f
4890 static bool
4891 gate_handle_eh (void)
4892 {
4893 return doing_eh (0);
4894 }
4895
4896 /* Complete generation of exception handling code. */
4897 static unsigned int
4898 rest_of_handle_eh (void)
4899 {
4900 finish_eh_generation ();
4901 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4902 return 0;
4903 }
4904
4905 struct rtl_opt_pass pass_rtl_eh =
4906 {
4907 {
4908 RTL_PASS,
4909 "eh", /* name */
4910 gate_handle_eh, /* gate */
4911 rest_of_handle_eh, /* execute */
4912 NULL, /* sub */
4913 NULL, /* next */
4914 0, /* static_pass_number */
4915 TV_JUMP, /* tv_id */
4916 0, /* properties_required */
4917 0, /* properties_provided */
4918 0, /* properties_destroyed */
4919 0, /* todo_flags_start */
4920 TODO_dump_func /* todo_flags_finish */
4921 }
4922 };
4923
4924 #include "gt-except.h"