cgraph.c (dump_cgraph_node): Dump size/time/benefit.
[gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 [ Add updated documentation on how to use this. ] */
48
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80 #include "tree-flow.h"
81
82 /* Provide defaults for stuff that may not be defined when using
83 sjlj exceptions. */
84 #ifndef EH_RETURN_DATA_REGNO
85 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 #endif
87
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 gimple (*lang_protect_cleanup_actions) (void);
91
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
94
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
97
98 /* A hash table of label to region number. */
99
100 struct GTY(()) ehl_map_entry {
101 rtx label;
102 struct eh_region *region;
103 };
104
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
108
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
116 \f
117
118 struct GTY(()) call_site_record
119 {
120 rtx landing_pad;
121 int action;
122 };
123 \f
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
126
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
137
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
146
147 static void remove_eh_handler (struct eh_region *);
148 static void remove_eh_handler_and_replace (struct eh_region *,
149 struct eh_region *, bool);
150
151 /* The return value of reachable_next_level. */
152 enum reachable_code
153 {
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
162 };
163
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region *, tree,
166 struct reachable_info *, bool);
167
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region *);
172 static int add_call_site (rtx, int);
173
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
182
183 \f
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
187
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
190
191 int
192 doing_eh (int do_warn)
193 {
194 if (! flag_exceptions)
195 {
196 static int warned = 0;
197 if (! warned && do_warn)
198 {
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
201 }
202 return 0;
203 }
204 return 1;
205 }
206
207 \f
208 void
209 init_eh (void)
210 {
211 if (! flag_exceptions)
212 return;
213
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
215
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
218 if (USING_SJLJ_EXCEPTIONS)
219 {
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
221
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
223
224 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
225 build_pointer_type (sjlj_fc_type_node));
226 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
227
228 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
229 integer_type_node);
230 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
231
232 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
233 tmp = build_array_type (lang_hooks.types.type_for_mode
234 (targetm.unwind_word_mode (), 1),
235 tmp);
236 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
237 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
238
239 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
240 ptr_type_node);
241 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
242
243 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
246
247 #ifdef DONT_USE_BUILTIN_SETJMP
248 #ifdef JMP_BUF_SIZE
249 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
250 #else
251 /* Should be large enough for most systems, if it is not,
252 JMP_BUF_SIZE should be defined with the proper value. It will
253 also tend to be larger than necessary for most systems, a more
254 optimal port will define JMP_BUF_SIZE. */
255 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
256 #endif
257 #else
258 /* builtin_setjmp takes a pointer to 5 words. */
259 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
260 #endif
261 tmp = build_index_type (tmp);
262 tmp = build_array_type (ptr_type_node, tmp);
263 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
264 #ifdef DONT_USE_BUILTIN_SETJMP
265 /* We don't know what the alignment requirements of the
266 runtime's jmp_buf has. Overestimate. */
267 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
268 DECL_USER_ALIGN (f_jbuf) = 1;
269 #endif
270 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
271
272 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
273 TREE_CHAIN (f_prev) = f_cs;
274 TREE_CHAIN (f_cs) = f_data;
275 TREE_CHAIN (f_data) = f_per;
276 TREE_CHAIN (f_per) = f_lsda;
277 TREE_CHAIN (f_lsda) = f_jbuf;
278
279 layout_type (sjlj_fc_type_node);
280
281 /* Cache the interesting field offsets so that we have
282 easy access from rtl. */
283 sjlj_fc_call_site_ofs
284 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
285 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
286 sjlj_fc_data_ofs
287 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
288 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
289 sjlj_fc_personality_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
292 sjlj_fc_lsda_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
295 sjlj_fc_jbuf_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
298 }
299 }
300
301 void
302 init_eh_for_function (void)
303 {
304 cfun->eh = GGC_CNEW (struct eh_status);
305 }
306 \f
307 /* Routines to generate the exception tree somewhat directly.
308 These are used from tree-eh.c when processing exception related
309 nodes during tree optimization. */
310
311 static struct eh_region *
312 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
313 {
314 struct eh_region *new_eh;
315
316 #ifdef ENABLE_CHECKING
317 gcc_assert (doing_eh (0));
318 #endif
319
320 /* Insert a new blank region as a leaf in the tree. */
321 new_eh = GGC_CNEW (struct eh_region);
322 new_eh->type = type;
323 new_eh->outer = outer;
324 if (outer)
325 {
326 new_eh->next_peer = outer->inner;
327 outer->inner = new_eh;
328 }
329 else
330 {
331 new_eh->next_peer = cfun->eh->region_tree;
332 cfun->eh->region_tree = new_eh;
333 }
334
335 new_eh->region_number = ++cfun->eh->last_region_number;
336
337 return new_eh;
338 }
339
340 struct eh_region *
341 gen_eh_region_cleanup (struct eh_region *outer)
342 {
343 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
344 return cleanup;
345 }
346
347 struct eh_region *
348 gen_eh_region_try (struct eh_region *outer)
349 {
350 return gen_eh_region (ERT_TRY, outer);
351 }
352
353 struct eh_region *
354 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
355 {
356 struct eh_region *c, *l;
357 tree type_list, type_node;
358
359 /* Ensure to always end up with a type list to normalize further
360 processing, then register each type against the runtime types map. */
361 type_list = type_or_list;
362 if (type_or_list)
363 {
364 if (TREE_CODE (type_or_list) != TREE_LIST)
365 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
366
367 type_node = type_list;
368 for (; type_node; type_node = TREE_CHAIN (type_node))
369 add_type_for_runtime (TREE_VALUE (type_node));
370 }
371
372 c = gen_eh_region (ERT_CATCH, t->outer);
373 c->u.eh_catch.type_list = type_list;
374 l = t->u.eh_try.last_catch;
375 c->u.eh_catch.prev_catch = l;
376 if (l)
377 l->u.eh_catch.next_catch = c;
378 else
379 t->u.eh_try.eh_catch = c;
380 t->u.eh_try.last_catch = c;
381
382 return c;
383 }
384
385 struct eh_region *
386 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
387 {
388 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
389 region->u.allowed.type_list = allowed;
390
391 for (; allowed ; allowed = TREE_CHAIN (allowed))
392 add_type_for_runtime (TREE_VALUE (allowed));
393
394 return region;
395 }
396
397 struct eh_region *
398 gen_eh_region_must_not_throw (struct eh_region *outer)
399 {
400 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
401 }
402
403 int
404 get_eh_region_number (struct eh_region *region)
405 {
406 return region->region_number;
407 }
408
409 bool
410 get_eh_region_may_contain_throw (struct eh_region *region)
411 {
412 return region->may_contain_throw;
413 }
414
415 tree
416 get_eh_region_tree_label (struct eh_region *region)
417 {
418 return region->tree_label;
419 }
420
421 tree
422 get_eh_region_no_tree_label (int region)
423 {
424 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
425 }
426
427 void
428 set_eh_region_tree_label (struct eh_region *region, tree lab)
429 {
430 region->tree_label = lab;
431 }
432 \f
433 void
434 expand_resx_expr (tree exp)
435 {
436 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
437 struct eh_region *reg = VEC_index (eh_region,
438 cfun->eh->region_array, region_nr);
439
440 gcc_assert (!reg->resume);
441 do_pending_stack_adjust ();
442 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
443 emit_barrier ();
444 }
445
446 /* Note that the current EH region (if any) may contain a throw, or a
447 call to a function which itself may contain a throw. */
448
449 void
450 note_eh_region_may_contain_throw (struct eh_region *region)
451 {
452 while (region && !region->may_contain_throw)
453 {
454 region->may_contain_throw = 1;
455 region = region->outer;
456 }
457 }
458
459
460 /* Return an rtl expression for a pointer to the exception object
461 within a handler. */
462
463 rtx
464 get_exception_pointer (void)
465 {
466 if (! crtl->eh.exc_ptr)
467 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
468 return crtl->eh.exc_ptr;
469 }
470
471 /* Return an rtl expression for the exception dispatch filter
472 within a handler. */
473
474 rtx
475 get_exception_filter (void)
476 {
477 if (! crtl->eh.filter)
478 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
479 return crtl->eh.filter;
480 }
481 \f
482 /* This section is for the exception handling specific optimization pass. */
483
484 /* Random access the exception region tree. */
485
486 void
487 collect_eh_region_array (void)
488 {
489 struct eh_region *i;
490
491 i = cfun->eh->region_tree;
492 if (! i)
493 return;
494
495 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
496 cfun->eh->last_region_number + 1);
497 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
498
499 while (1)
500 {
501 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
502
503 /* If there are sub-regions, process them. */
504 if (i->inner)
505 i = i->inner;
506 /* If there are peers, process them. */
507 else if (i->next_peer)
508 i = i->next_peer;
509 /* Otherwise, step back up the tree to the next peer. */
510 else
511 {
512 do {
513 i = i->outer;
514 if (i == NULL)
515 return;
516 } while (i->next_peer == NULL);
517 i = i->next_peer;
518 }
519 }
520 }
521
522 /* R is MUST_NOT_THROW region that is not reachable via local
523 RESX instructions. It still must be kept in the tree in case runtime
524 can unwind through it, or we will eliminate out terminate call
525 runtime would do otherwise. Return TRUE if R contains throwing statements
526 or some of the exceptions in inner regions can be unwound up to R.
527
528 CONTAINS_STMT is bitmap of all regions that contains some throwing
529 statements.
530
531 Function looks O(^3) at first sight. In fact the function is called at most
532 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
533 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
534 the outer loop examines every region at most once. The inner loop
535 is doing unwinding from the throwing statement same way as we do during
536 CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
537 of CFG. In practice Eh trees are wide, not deep, so this is not
538 a problem. */
539
540 static bool
541 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
542 {
543 struct eh_region *i = r->inner;
544 unsigned n;
545 bitmap_iterator bi;
546
547 if (TEST_BIT (contains_stmt, r->region_number))
548 return true;
549 if (r->aka)
550 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
551 if (TEST_BIT (contains_stmt, n))
552 return true;
553 if (!i)
554 return false;
555 while (1)
556 {
557 /* It is pointless to look into MUST_NOT_THROW
558 or dive into subregions. They never unwind up. */
559 if (i->type != ERT_MUST_NOT_THROW)
560 {
561 bool found = TEST_BIT (contains_stmt, i->region_number);
562 if (!found)
563 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
564 if (TEST_BIT (contains_stmt, n))
565 {
566 found = true;
567 break;
568 }
569 /* We have nested region that contains throwing statement.
570 See if resuming might lead up to the resx or we get locally
571 caught sooner. If we get locally caught sooner, we either
572 know region R is not reachable or it would have direct edge
573 from the EH resx and thus consider region reachable at
574 firest place. */
575 if (found)
576 {
577 struct eh_region *i1 = i;
578 tree type_thrown = NULL_TREE;
579
580 if (i1->type == ERT_THROW)
581 {
582 type_thrown = i1->u.eh_throw.type;
583 i1 = i1->outer;
584 }
585 for (; i1 != r; i1 = i1->outer)
586 if (reachable_next_level (i1, type_thrown, NULL,
587 false) >= RNL_CAUGHT)
588 break;
589 if (i1 == r)
590 return true;
591 }
592 }
593 /* If there are sub-regions, process them. */
594 if (i->type != ERT_MUST_NOT_THROW && i->inner)
595 i = i->inner;
596 /* If there are peers, process them. */
597 else if (i->next_peer)
598 i = i->next_peer;
599 /* Otherwise, step back up the tree to the next peer. */
600 else
601 {
602 do
603 {
604 i = i->outer;
605 if (i == r)
606 return false;
607 }
608 while (i->next_peer == NULL);
609 i = i->next_peer;
610 }
611 }
612 }
613
614 /* Bring region R to the root of tree. */
615
616 static void
617 bring_to_root (struct eh_region *r)
618 {
619 struct eh_region **pp;
620 struct eh_region *outer = r->outer;
621 if (!r->outer)
622 return;
623 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
624 continue;
625 *pp = r->next_peer;
626 r->outer = NULL;
627 r->next_peer = cfun->eh->region_tree;
628 cfun->eh->region_tree = r;
629 }
630
631 /* Return true if region R2 can be replaced by R1. */
632
633 static bool
634 eh_region_replaceable_by_p (const struct eh_region *r1,
635 const struct eh_region *r2)
636 {
637 /* Regions are semantically same if they are of same type,
638 have same label and type. */
639 if (r1->type != r2->type)
640 return false;
641 if (r1->tree_label != r2->tree_label)
642 return false;
643
644 /* Verify that also region type dependent data are the same. */
645 switch (r1->type)
646 {
647 case ERT_MUST_NOT_THROW:
648 case ERT_CLEANUP:
649 break;
650 case ERT_TRY:
651 {
652 struct eh_region *c1, *c2;
653 for (c1 = r1->u.eh_try.eh_catch,
654 c2 = r2->u.eh_try.eh_catch;
655 c1 && c2;
656 c1 = c1->u.eh_catch.next_catch,
657 c2 = c2->u.eh_catch.next_catch)
658 if (!eh_region_replaceable_by_p (c1, c2))
659 return false;
660 if (c1 || c2)
661 return false;
662 }
663 break;
664 case ERT_CATCH:
665 if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
666 return false;
667 if (!list_equal_p (r1->u.eh_catch.filter_list,
668 r2->u.eh_catch.filter_list))
669 return false;
670 break;
671 case ERT_ALLOWED_EXCEPTIONS:
672 if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
673 return false;
674 if (r1->u.allowed.filter != r2->u.allowed.filter)
675 return false;
676 break;
677 case ERT_THROW:
678 if (r1->u.eh_throw.type != r2->u.eh_throw.type)
679 return false;
680 break;
681 default:
682 gcc_unreachable ();
683 }
684 if (dump_file && (dump_flags & TDF_DETAILS))
685 fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
686 r2->region_number);
687 return true;
688 }
689
690 /* Replace region R2 by R1. */
691
692 static void
693 replace_region (struct eh_region *r1, struct eh_region *r2)
694 {
695 struct eh_region *next1 = r1->u.eh_try.eh_catch;
696 struct eh_region *next2 = r2->u.eh_try.eh_catch;
697 bool is_try = r1->type == ERT_TRY;
698
699 gcc_assert (r1->type != ERT_CATCH);
700 remove_eh_handler_and_replace (r2, r1, false);
701 if (is_try)
702 {
703 while (next1)
704 {
705 r1 = next1;
706 r2 = next2;
707 gcc_assert (next1->type == ERT_CATCH);
708 gcc_assert (next2->type == ERT_CATCH);
709 next1 = next1->u.eh_catch.next_catch;
710 next2 = next2->u.eh_catch.next_catch;
711 remove_eh_handler_and_replace (r2, r1, false);
712 }
713 }
714 }
715
716 /* Return hash value of type list T. */
717
718 static hashval_t
719 hash_type_list (tree t)
720 {
721 hashval_t val = 0;
722 for (; t; t = TREE_CHAIN (t))
723 val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
724 return val;
725 }
726
727 /* Hash EH regions so semantically same regions get same hash value. */
728
729 static hashval_t
730 hash_eh_region (const void *r)
731 {
732 const struct eh_region *region = (const struct eh_region *)r;
733 hashval_t val = region->type;
734
735 if (region->tree_label)
736 val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
737 switch (region->type)
738 {
739 case ERT_MUST_NOT_THROW:
740 case ERT_CLEANUP:
741 break;
742 case ERT_TRY:
743 {
744 struct eh_region *c;
745 for (c = region->u.eh_try.eh_catch;
746 c; c = c->u.eh_catch.next_catch)
747 val = iterative_hash_hashval_t (hash_eh_region (c), val);
748 }
749 break;
750 case ERT_CATCH:
751 val = iterative_hash_hashval_t (hash_type_list
752 (region->u.eh_catch.type_list), val);
753 break;
754 case ERT_ALLOWED_EXCEPTIONS:
755 val = iterative_hash_hashval_t
756 (hash_type_list (region->u.allowed.type_list), val);
757 val = iterative_hash_hashval_t (region->u.allowed.filter, val);
758 break;
759 case ERT_THROW:
760 val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
761 break;
762 default:
763 gcc_unreachable ();
764 }
765 return val;
766 }
767
768 /* Return true if regions R1 and R2 are equal. */
769
770 static int
771 eh_regions_equal_p (const void *r1, const void *r2)
772 {
773 return eh_region_replaceable_by_p ((const struct eh_region *)r1,
774 (const struct eh_region *)r2);
775 }
776
777 /* Walk all peers of REGION and try to merge those regions
778 that are semantically equivalent. Look into subregions
779 recursively too. */
780
781 static bool
782 merge_peers (struct eh_region *region)
783 {
784 struct eh_region *r1, *r2, *outer = NULL, *next;
785 bool merged = false;
786 int num_regions = 0;
787 if (region)
788 outer = region->outer;
789 else
790 return false;
791
792 /* First see if there is inner region equivalent to region
793 in question. EH control flow is acyclic so we know we
794 can merge them. */
795 if (outer)
796 for (r1 = region; r1; r1 = next)
797 {
798 next = r1->next_peer;
799 if (r1->type == ERT_CATCH)
800 continue;
801 if (eh_region_replaceable_by_p (r1->outer, r1))
802 {
803 replace_region (r1->outer, r1);
804 merged = true;
805 }
806 else
807 num_regions ++;
808 }
809
810 /* Get new first region and try to match the peers
811 for equivalence. */
812 if (outer)
813 region = outer->inner;
814 else
815 region = cfun->eh->region_tree;
816
817 /* There are few regions to inspect:
818 N^2 loop matching each region with each region
819 will do the job well. */
820 if (num_regions < 10)
821 {
822 for (r1 = region; r1; r1 = r1->next_peer)
823 {
824 if (r1->type == ERT_CATCH)
825 continue;
826 for (r2 = r1->next_peer; r2; r2 = next)
827 {
828 next = r2->next_peer;
829 if (eh_region_replaceable_by_p (r1, r2))
830 {
831 replace_region (r1, r2);
832 merged = true;
833 }
834 }
835 }
836 }
837 /* Or use hashtable to avoid N^2 behaviour. */
838 else
839 {
840 htab_t hash;
841 hash = htab_create (num_regions, hash_eh_region,
842 eh_regions_equal_p, NULL);
843 for (r1 = region; r1; r1 = next)
844 {
845 void **slot;
846
847 next = r1->next_peer;
848 if (r1->type == ERT_CATCH)
849 continue;
850 slot = htab_find_slot (hash, r1, INSERT);
851 if (!*slot)
852 *slot = r1;
853 else
854 replace_region ((struct eh_region *)*slot, r1);
855 }
856 htab_delete (hash);
857 }
858 for (r1 = region; r1; r1 = r1->next_peer)
859 merged |= merge_peers (r1->inner);
860 return merged;
861 }
862
863 /* Remove all regions whose labels are not reachable.
864 REACHABLE is bitmap of all regions that are used by the function
865 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
866
867 void
868 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
869 {
870 int i;
871 struct eh_region *r;
872 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
873 struct eh_region *local_must_not_throw = NULL;
874 struct eh_region *first_must_not_throw = NULL;
875
876 for (i = cfun->eh->last_region_number; i > 0; --i)
877 {
878 r = VEC_index (eh_region, cfun->eh->region_array, i);
879 if (!r || r->region_number != i)
880 continue;
881 if (!TEST_BIT (reachable, i) && !r->resume)
882 {
883 bool kill_it = true;
884
885 r->tree_label = NULL;
886 switch (r->type)
887 {
888 case ERT_THROW:
889 /* Don't remove ERT_THROW regions if their outer region
890 is reachable. */
891 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
892 kill_it = false;
893 break;
894 case ERT_MUST_NOT_THROW:
895 /* MUST_NOT_THROW regions are implementable solely in the
896 runtime, but we need them when inlining function.
897
898 Keep them if outer region is not MUST_NOT_THROW a well
899 and if they contain some statement that might unwind through
900 them. */
901 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
902 && (!contains_stmt
903 || can_be_reached_by_runtime (contains_stmt, r)))
904 kill_it = false;
905 break;
906 case ERT_TRY:
907 {
908 /* TRY regions are reachable if any of its CATCH regions
909 are reachable. */
910 struct eh_region *c;
911 for (c = r->u.eh_try.eh_catch; c;
912 c = c->u.eh_catch.next_catch)
913 if (TEST_BIT (reachable, c->region_number))
914 {
915 kill_it = false;
916 break;
917 }
918 break;
919 }
920
921 default:
922 break;
923 }
924
925 if (kill_it)
926 {
927 if (dump_file)
928 fprintf (dump_file, "Removing unreachable eh region %i\n",
929 r->region_number);
930 remove_eh_handler (r);
931 }
932 else if (r->type == ERT_MUST_NOT_THROW)
933 {
934 if (!first_must_not_throw)
935 first_must_not_throw = r;
936 VEC_safe_push (eh_region, heap, must_not_throws, r);
937 }
938 }
939 else
940 if (r->type == ERT_MUST_NOT_THROW)
941 {
942 if (!local_must_not_throw)
943 local_must_not_throw = r;
944 if (r->outer)
945 VEC_safe_push (eh_region, heap, must_not_throws, r);
946 }
947 }
948
949 /* MUST_NOT_THROW regions without local handler are all the same; they
950 trigger terminate call in runtime.
951 MUST_NOT_THROW handled locally can differ in debug info associated
952 to std::terminate () call or if one is coming from Java and other
953 from C++ whether they call terminate or abort.
954
955 We merge all MUST_NOT_THROW regions handled by the run-time into one.
956 We alsobring all local MUST_NOT_THROW regions to the roots of EH tree
957 (since unwinding never continues to the outer region anyway).
958 If MUST_NOT_THROW with local handler is present in the tree, we use
959 that region to merge into, since it will remain in tree anyway;
960 otherwise we use first MUST_NOT_THROW.
961
962 Merging of locally handled regions needs changes to the CFG. Crossjumping
963 should take care of this, by looking at the actual code and
964 ensuring that the cleanup actions are really the same. */
965
966 if (local_must_not_throw)
967 first_must_not_throw = local_must_not_throw;
968
969 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
970 {
971 if (!r->label && !r->tree_label && r != first_must_not_throw)
972 {
973 if (dump_file)
974 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
975 r->region_number,
976 first_must_not_throw->region_number);
977 remove_eh_handler_and_replace (r, first_must_not_throw, false);
978 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
979 }
980 else
981 bring_to_root (r);
982 }
983 merge_peers (cfun->eh->region_tree);
984 #ifdef ENABLE_CHECKING
985 verify_eh_tree (cfun);
986 #endif
987 VEC_free (eh_region, heap, must_not_throws);
988 }
989
990 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
991 is identical to label. */
992
993 VEC (int, heap) *
994 label_to_region_map (void)
995 {
996 VEC (int, heap) * label_to_region = NULL;
997 int i;
998 int idx;
999
1000 VEC_safe_grow_cleared (int, heap, label_to_region,
1001 cfun->cfg->last_label_uid + 1);
1002 for (i = cfun->eh->last_region_number; i > 0; --i)
1003 {
1004 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1005 if (r && r->region_number == i
1006 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
1007 {
1008 if ((idx = VEC_index (int, label_to_region,
1009 LABEL_DECL_UID (r->tree_label))) != 0)
1010 r->next_region_sharing_label =
1011 VEC_index (eh_region, cfun->eh->region_array, idx);
1012 else
1013 r->next_region_sharing_label = NULL;
1014 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
1015 i);
1016 }
1017 }
1018 return label_to_region;
1019 }
1020
1021 /* Return number of EH regions. */
1022 int
1023 num_eh_regions (void)
1024 {
1025 return cfun->eh->last_region_number + 1;
1026 }
1027
1028 /* Return next region sharing same label as REGION. */
1029
1030 int
1031 get_next_region_sharing_label (int region)
1032 {
1033 struct eh_region *r;
1034 if (!region)
1035 return 0;
1036 r = VEC_index (eh_region, cfun->eh->region_array, region);
1037 if (!r || !r->next_region_sharing_label)
1038 return 0;
1039 return r->next_region_sharing_label->region_number;
1040 }
1041
1042 /* Return bitmap of all labels that are handlers of must not throw regions. */
1043
1044 bitmap
1045 must_not_throw_labels (void)
1046 {
1047 struct eh_region *i;
1048 bitmap labels = BITMAP_ALLOC (NULL);
1049
1050 i = cfun->eh->region_tree;
1051 if (! i)
1052 return labels;
1053
1054 while (1)
1055 {
1056 if (i->type == ERT_MUST_NOT_THROW && i->tree_label
1057 && LABEL_DECL_UID (i->tree_label) >= 0)
1058 bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));
1059
1060 /* If there are sub-regions, process them. */
1061 if (i->inner)
1062 i = i->inner;
1063 /* If there are peers, process them. */
1064 else if (i->next_peer)
1065 i = i->next_peer;
1066 /* Otherwise, step back up the tree to the next peer. */
1067 else
1068 {
1069 do {
1070 i = i->outer;
1071 if (i == NULL)
1072 return labels;
1073 } while (i->next_peer == NULL);
1074 i = i->next_peer;
1075 }
1076 }
1077 }
1078
1079 /* Set up EH labels for RTL. */
1080
1081 void
1082 convert_from_eh_region_ranges (void)
1083 {
1084 int i, n = cfun->eh->last_region_number;
1085
1086 /* Most of the work is already done at the tree level. All we need to
1087 do is collect the rtl labels that correspond to the tree labels that
1088 collect the rtl labels that correspond to the tree labels
1089 we allocated earlier. */
1090 for (i = 1; i <= n; ++i)
1091 {
1092 struct eh_region *region;
1093
1094 region = VEC_index (eh_region, cfun->eh->region_array, i);
1095 if (region && region->tree_label)
1096 region->label = DECL_RTL_IF_SET (region->tree_label);
1097 }
1098 }
1099
1100 void
1101 find_exception_handler_labels (void)
1102 {
1103 int i;
1104
1105 if (cfun->eh->region_tree == NULL)
1106 return;
1107
1108 for (i = cfun->eh->last_region_number; i > 0; --i)
1109 {
1110 struct eh_region *region;
1111 rtx lab;
1112
1113 region = VEC_index (eh_region, cfun->eh->region_array, i);
1114 if (! region || region->region_number != i)
1115 continue;
1116 if (crtl->eh.built_landing_pads)
1117 lab = region->landing_pad;
1118 else
1119 lab = region->label;
1120 }
1121 }
1122
1123 /* Returns true if the current function has exception handling regions. */
1124
1125 bool
1126 current_function_has_exception_handlers (void)
1127 {
1128 int i;
1129
1130 for (i = cfun->eh->last_region_number; i > 0; --i)
1131 {
1132 struct eh_region *region;
1133
1134 region = VEC_index (eh_region, cfun->eh->region_array, i);
1135 if (region
1136 && region->region_number == i
1137 && region->type != ERT_THROW)
1138 return true;
1139 }
1140
1141 return false;
1142 }
1143 \f
1144 /* A subroutine of duplicate_eh_regions. Search the region tree under O
1145 for the minimum and maximum region numbers. Update *MIN and *MAX. */
1146
1147 static void
1148 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
1149 {
1150 int i;
1151
1152 if (o->aka)
1153 {
1154 i = bitmap_first_set_bit (o->aka);
1155 if (i < *min)
1156 *min = i;
1157 i = bitmap_last_set_bit (o->aka);
1158 if (i > *max)
1159 *max = i;
1160 }
1161 if (o->region_number < *min)
1162 *min = o->region_number;
1163 if (o->region_number > *max)
1164 *max = o->region_number;
1165
1166 if (o->inner)
1167 {
1168 o = o->inner;
1169 duplicate_eh_regions_0 (o, min, max);
1170 while (o->next_peer)
1171 {
1172 o = o->next_peer;
1173 duplicate_eh_regions_0 (o, min, max);
1174 }
1175 }
1176 }
1177
1178 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
1179 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
1180 about the other internal pointers just yet, just the tree-like pointers. */
1181
1182 static eh_region
1183 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1184 {
1185 eh_region ret, n;
1186
1187 ret = n = GGC_NEW (struct eh_region);
1188
1189 *n = *old;
1190 n->outer = outer;
1191 n->next_peer = NULL;
1192 if (old->aka)
1193 {
1194 unsigned i;
1195 bitmap_iterator bi;
1196 n->aka = BITMAP_GGC_ALLOC ();
1197
1198 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1199 {
1200 bitmap_set_bit (n->aka, i + eh_offset);
1201 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1202 }
1203 }
1204
1205 n->region_number += eh_offset;
1206 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1207
1208 if (old->inner)
1209 {
1210 old = old->inner;
1211 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1212 while (old->next_peer)
1213 {
1214 old = old->next_peer;
1215 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1216 }
1217 }
1218
1219 return ret;
1220 }
1221
1222 /* Look for first outer region of R (or R itself) that is
1223 TRY region. Return NULL if none. */
1224
1225 static struct eh_region *
1226 find_prev_try (struct eh_region * r)
1227 {
1228 for (; r && r->type != ERT_TRY; r = r->outer)
1229 if (r->type == ERT_MUST_NOT_THROW
1230 || (r->type == ERT_ALLOWED_EXCEPTIONS
1231 && !r->u.allowed.type_list))
1232 {
1233 r = NULL;
1234 break;
1235 }
1236 return r;
1237 }
1238
1239 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
1240 function and root the tree below OUTER_REGION. Remap labels using MAP
1241 callback. The special case of COPY_REGION of 0 means all regions. */
1242
1243 int
1244 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1245 void *data, int copy_region, int outer_region)
1246 {
1247 eh_region cur, outer, *splice;
1248 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1249 int num_regions;
1250
1251 if (!ifun->eh)
1252 return 0;
1253 #ifdef ENABLE_CHECKING
1254 verify_eh_tree (ifun);
1255 #endif
1256
1257 /* Find the range of region numbers to be copied. The interface we
1258 provide here mandates a single offset to find new number from old,
1259 which means we must look at the numbers present, instead of the
1260 count or something else. */
1261 if (copy_region > 0)
1262 {
1263 min_region = INT_MAX;
1264 max_region = 0;
1265
1266 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1267 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1268 }
1269 else
1270 {
1271 min_region = 1;
1272 max_region = ifun->eh->last_region_number;
1273 }
1274 num_regions = max_region - min_region + 1;
1275 cfun_last_region_number = cfun->eh->last_region_number;
1276 eh_offset = cfun_last_region_number + 1 - min_region;
1277
1278 /* If we've not yet created a region array, do so now. */
1279 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1280 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1281 cfun->eh->last_region_number + 1);
1282
1283 /* Locate the spot at which to insert the new tree. */
1284 if (outer_region > 0)
1285 {
1286 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1287 if (outer)
1288 splice = &outer->inner;
1289 else
1290 splice = &cfun->eh->region_tree;
1291 }
1292 else
1293 {
1294 outer = NULL;
1295 splice = &cfun->eh->region_tree;
1296 }
1297 while (*splice)
1298 splice = &(*splice)->next_peer;
1299
1300 if (!ifun->eh->region_tree)
1301 {
1302 if (outer)
1303 for (i = cfun_last_region_number + 1;
1304 i <= cfun->eh->last_region_number; i++)
1305 {
1306 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1307 if (outer->aka == NULL)
1308 outer->aka = BITMAP_GGC_ALLOC ();
1309 bitmap_set_bit (outer->aka, i);
1310 }
1311 return eh_offset;
1312 }
1313
1314 /* Copy all the regions in the subtree. */
1315 if (copy_region > 0)
1316 {
1317 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1318 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1319 }
1320 else
1321 {
1322 eh_region n;
1323
1324 cur = ifun->eh->region_tree;
1325 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1326 while (cur->next_peer)
1327 {
1328 cur = cur->next_peer;
1329 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1330 }
1331 }
1332
1333 /* Remap all the labels in the new regions. */
1334 for (i = cfun_last_region_number + 1;
1335 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1336 if (cur && cur->tree_label)
1337 cur->tree_label = map (cur->tree_label, data);
1338
1339 /* Remap all of the internal catch and cleanup linkages. Since we
1340 duplicate entire subtrees, all of the referenced regions will have
1341 been copied too. And since we renumbered them as a block, a simple
1342 bit of arithmetic finds us the index for the replacement region. */
1343 for (i = cfun_last_region_number + 1;
1344 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1345 {
1346 /* All removed EH that is toplevel in input function is now
1347 in outer EH of output function. */
1348 if (cur == NULL)
1349 {
1350 gcc_assert (VEC_index
1351 (eh_region, ifun->eh->region_array,
1352 i - eh_offset) == NULL);
1353 if (outer)
1354 {
1355 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1356 if (outer->aka == NULL)
1357 outer->aka = BITMAP_GGC_ALLOC ();
1358 bitmap_set_bit (outer->aka, i);
1359 }
1360 continue;
1361 }
1362 if (i != cur->region_number)
1363 continue;
1364
1365 #define REMAP(REG) \
1366 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1367 (REG)->region_number + eh_offset)
1368
1369 switch (cur->type)
1370 {
1371 case ERT_TRY:
1372 if (cur->u.eh_try.eh_catch)
1373 REMAP (cur->u.eh_try.eh_catch);
1374 if (cur->u.eh_try.last_catch)
1375 REMAP (cur->u.eh_try.last_catch);
1376 break;
1377
1378 case ERT_CATCH:
1379 if (cur->u.eh_catch.next_catch)
1380 REMAP (cur->u.eh_catch.next_catch);
1381 if (cur->u.eh_catch.prev_catch)
1382 REMAP (cur->u.eh_catch.prev_catch);
1383 break;
1384
1385 default:
1386 break;
1387 }
1388
1389 #undef REMAP
1390 }
1391 #ifdef ENABLE_CHECKING
1392 verify_eh_tree (cfun);
1393 #endif
1394
1395 return eh_offset;
1396 }
1397
1398 /* Return new copy of eh region OLD inside region NEW_OUTER.
1399 Do not care about updating the tree otherwise. */
1400
1401 static struct eh_region *
1402 copy_eh_region_1 (struct eh_region *old, struct eh_region *new_outer)
1403 {
1404 struct eh_region *new_eh = gen_eh_region (old->type, new_outer);
1405 new_eh->u = old->u;
1406 new_eh->tree_label = old->tree_label;
1407 new_eh->may_contain_throw = old->may_contain_throw;
1408 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1409 cfun->eh->last_region_number + 1);
1410 VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
1411 if (dump_file && (dump_flags & TDF_DETAILS))
1412 fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
1413 return new_eh;
1414 }
1415
1416 /* Return new copy of eh region OLD inside region NEW_OUTER.
1417
1418 Copy whole catch-try chain if neccesary. */
1419
1420 static struct eh_region *
1421 copy_eh_region (struct eh_region *old, struct eh_region *new_outer)
1422 {
1423 struct eh_region *r, *n, *old_try, *new_try, *ret = NULL;
1424 VEC(eh_region,heap) *catch_list = NULL;
1425
1426 if (old->type != ERT_CATCH)
1427 {
1428 gcc_assert (old->type != ERT_TRY);
1429 r = copy_eh_region_1 (old, new_outer);
1430 return r;
1431 }
1432
1433 /* Locate and copy corresponding TRY. */
1434 for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
1435 continue;
1436 gcc_assert (old_try->type == ERT_TRY);
1437 new_try = gen_eh_region_try (new_outer);
1438 new_try->tree_label = old_try->tree_label;
1439 new_try->may_contain_throw = old_try->may_contain_throw;
1440 if (dump_file && (dump_flags & TDF_DETAILS))
1441 fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
1442 old_try->region_number, new_try->region_number);
1443 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1444 cfun->eh->last_region_number + 1);
1445 VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);
1446
1447 /* In order to keep CATCH list in order, we need to copy in reverse order. */
1448 for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
1449 VEC_safe_push (eh_region, heap, catch_list, r);
1450
1451 while (VEC_length (eh_region, catch_list))
1452 {
1453 r = VEC_pop (eh_region, catch_list);
1454
1455 /* Duplicate CATCH. */
1456 n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
1457 n->tree_label = r->tree_label;
1458 n->may_contain_throw = r->may_contain_throw;
1459 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1460 cfun->eh->last_region_number + 1);
1461 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1462 n->tree_label = r->tree_label;
1463
1464 if (dump_file && (dump_flags & TDF_DETAILS))
1465 fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
1466 r->region_number, n->region_number);
1467 if (r == old)
1468 ret = n;
1469 }
1470 VEC_free (eh_region, heap, catch_list);
1471 gcc_assert (ret);
1472 return ret;
1473 }
1474
1475 /* Callback for forach_reachable_handler that push REGION into single VECtor DATA. */
1476
1477 static void
1478 push_reachable_handler (struct eh_region *region, void *data)
1479 {
1480 VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
1481 VEC_safe_push (eh_region, heap, *trace, region);
1482 }
1483
1484 /* Redirect EH edge E that to NEW_DEST_LABEL.
1485 IS_RESX, INLINABLE_CALL and REGION_NMUBER match the parameter of
1486 foreach_reachable_handler. */
1487
1488 struct eh_region *
1489 redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
1490 bool inlinable_call, int region_number)
1491 {
1492 struct eh_region *outer;
1493 struct eh_region *region;
1494 VEC (eh_region, heap) * trace = NULL;
1495 int i;
1496 int start_here = -1;
1497 basic_block old_bb = e->dest;
1498 struct eh_region *old, *r = NULL;
1499 bool update_inplace = true;
1500 edge_iterator ei;
1501 edge e2;
1502
1503 /* If there is only one EH edge, we don't need to duplicate;
1504 just update labels in the tree. */
1505 FOR_EACH_EDGE (e2, ei, old_bb->preds)
1506 if ((e2->flags & EDGE_EH) && e2 != e)
1507 {
1508 update_inplace = false;
1509 break;
1510 }
1511
1512 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
1513 gcc_assert (region);
1514
1515 foreach_reachable_handler (region_number, is_resx, inlinable_call,
1516 push_reachable_handler, &trace);
1517 if (dump_file && (dump_flags & TDF_DETAILS))
1518 {
1519 dump_eh_tree (dump_file, cfun);
1520 fprintf (dump_file, "Trace: ");
1521 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1522 fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
1523 fprintf (dump_file, " inplace: %i\n", update_inplace);
1524 }
1525
1526 if (update_inplace)
1527 {
1528 /* In easy route just walk trace and update all occurences of the label. */
1529 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1530 {
1531 r = VEC_index (eh_region, trace, i);
1532 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1533 {
1534 r->tree_label = new_dest_label;
1535 if (dump_file && (dump_flags & TDF_DETAILS))
1536 fprintf (dump_file, "Updating label for region %i\n",
1537 r->region_number);
1538 }
1539 }
1540 r = region;
1541 }
1542 else
1543 {
1544 /* Now look for outermost handler that reffers to the basic block in question.
1545 We start our duplication there. */
1546 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1547 {
1548 r = VEC_index (eh_region, trace, i);
1549 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1550 start_here = i;
1551 }
1552 outer = VEC_index (eh_region, trace, start_here)->outer;
1553 gcc_assert (start_here >= 0);
1554
1555 /* And now do the dirty job! */
1556 for (i = start_here; i >= 0; i--)
1557 {
1558 old = VEC_index (eh_region, trace, i);
1559 gcc_assert (!outer || old->outer != outer->outer);
1560
1561 /* Copy region and update label. */
1562 r = copy_eh_region (old, outer);
1563 VEC_replace (eh_region, trace, i, r);
1564 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1565 {
1566 r->tree_label = new_dest_label;
1567 if (dump_file && (dump_flags & TDF_DETAILS))
1568 fprintf (dump_file, "Updating label for region %i\n",
1569 r->region_number);
1570 }
1571
1572 /* We got into copying CATCH. copy_eh_region already did job
1573 of copying all catch blocks corresponding to the try. Now
1574 we need to update labels in all of them and see trace.
1575
1576 We continue nesting into TRY region corresponding to CATCH:
1577 When duplicating EH tree contaiing subregions of CATCH,
1578 the CATCH region itself is never inserted to trace so we
1579 never get here anyway. */
1580 if (r->type == ERT_CATCH)
1581 {
1582 /* Walk other catch regions we copied and update labels as needed. */
1583 for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
1584 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1585 {
1586 r->tree_label = new_dest_label;
1587 if (dump_file && (dump_flags & TDF_DETAILS))
1588 fprintf (dump_file, "Updating label for region %i\n",
1589 r->region_number);
1590 }
1591 gcc_assert (r->type == ERT_TRY);
1592
1593 /* Skip sibling catch regions from the trace.
1594 They are already updated. */
1595 while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
1596 {
1597 gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
1598 i--;
1599 }
1600 }
1601
1602 outer = r;
1603 }
1604
1605 if (is_resx || region->type == ERT_THROW)
1606 r = copy_eh_region (region, outer);
1607 }
1608
1609 VEC_free (eh_region, heap, trace);
1610 if (dump_file && (dump_flags & TDF_DETAILS))
1611 {
1612 dump_eh_tree (dump_file, cfun);
1613 fprintf (dump_file, "New region: %i\n", r->region_number);
1614 }
1615 return r;
1616 }
1617
1618 /* Return region number of region that is outer to both if REGION_A and
1619 REGION_B in IFUN. */
1620
1621 int
1622 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1623 {
1624 struct eh_region *rp_a, *rp_b;
1625 sbitmap b_outer;
1626
1627 gcc_assert (ifun->eh->last_region_number > 0);
1628 gcc_assert (ifun->eh->region_tree);
1629
1630 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1631 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1632 gcc_assert (rp_a != NULL);
1633 gcc_assert (rp_b != NULL);
1634
1635 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1636 sbitmap_zero (b_outer);
1637
1638 do
1639 {
1640 SET_BIT (b_outer, rp_b->region_number);
1641 rp_b = rp_b->outer;
1642 }
1643 while (rp_b);
1644
1645 do
1646 {
1647 if (TEST_BIT (b_outer, rp_a->region_number))
1648 {
1649 sbitmap_free (b_outer);
1650 return rp_a->region_number;
1651 }
1652 rp_a = rp_a->outer;
1653 }
1654 while (rp_a);
1655
1656 sbitmap_free (b_outer);
1657 return -1;
1658 }
1659 \f
1660 static int
1661 t2r_eq (const void *pentry, const void *pdata)
1662 {
1663 const_tree const entry = (const_tree) pentry;
1664 const_tree const data = (const_tree) pdata;
1665
1666 return TREE_PURPOSE (entry) == data;
1667 }
1668
1669 static hashval_t
1670 t2r_hash (const void *pentry)
1671 {
1672 const_tree const entry = (const_tree) pentry;
1673 return TREE_HASH (TREE_PURPOSE (entry));
1674 }
1675
1676 void
1677 add_type_for_runtime (tree type)
1678 {
1679 tree *slot;
1680
1681 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1682 TREE_HASH (type), INSERT);
1683 if (*slot == NULL)
1684 {
1685 tree runtime = (*lang_eh_runtime_type) (type);
1686 *slot = tree_cons (type, runtime, NULL_TREE);
1687 }
1688 }
1689
1690 tree
1691 lookup_type_for_runtime (tree type)
1692 {
1693 tree *slot;
1694
1695 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1696 TREE_HASH (type), NO_INSERT);
1697
1698 /* We should have always inserted the data earlier. */
1699 return TREE_VALUE (*slot);
1700 }
1701
1702 \f
1703 /* Represent an entry in @TTypes for either catch actions
1704 or exception filter actions. */
1705 struct GTY(()) ttypes_filter {
1706 tree t;
1707 int filter;
1708 };
1709
1710 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1711 (a tree) for a @TTypes type node we are thinking about adding. */
1712
1713 static int
1714 ttypes_filter_eq (const void *pentry, const void *pdata)
1715 {
1716 const struct ttypes_filter *const entry
1717 = (const struct ttypes_filter *) pentry;
1718 const_tree const data = (const_tree) pdata;
1719
1720 return entry->t == data;
1721 }
1722
1723 static hashval_t
1724 ttypes_filter_hash (const void *pentry)
1725 {
1726 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1727 return TREE_HASH (entry->t);
1728 }
1729
1730 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1731 exception specification list we are thinking about adding. */
1732 /* ??? Currently we use the type lists in the order given. Someone
1733 should put these in some canonical order. */
1734
1735 static int
1736 ehspec_filter_eq (const void *pentry, const void *pdata)
1737 {
1738 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1739 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1740
1741 return type_list_equal (entry->t, data->t);
1742 }
1743
1744 /* Hash function for exception specification lists. */
1745
1746 static hashval_t
1747 ehspec_filter_hash (const void *pentry)
1748 {
1749 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1750 hashval_t h = 0;
1751 tree list;
1752
1753 for (list = entry->t; list ; list = TREE_CHAIN (list))
1754 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1755 return h;
1756 }
1757
1758 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1759 to speed up the search. Return the filter value to be used. */
1760
1761 static int
1762 add_ttypes_entry (htab_t ttypes_hash, tree type)
1763 {
1764 struct ttypes_filter **slot, *n;
1765
1766 slot = (struct ttypes_filter **)
1767 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1768
1769 if ((n = *slot) == NULL)
1770 {
1771 /* Filter value is a 1 based table index. */
1772
1773 n = XNEW (struct ttypes_filter);
1774 n->t = type;
1775 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1776 *slot = n;
1777
1778 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1779 }
1780
1781 return n->filter;
1782 }
1783
1784 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1785 to speed up the search. Return the filter value to be used. */
1786
1787 static int
1788 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1789 {
1790 struct ttypes_filter **slot, *n;
1791 struct ttypes_filter dummy;
1792
1793 dummy.t = list;
1794 slot = (struct ttypes_filter **)
1795 htab_find_slot (ehspec_hash, &dummy, INSERT);
1796
1797 if ((n = *slot) == NULL)
1798 {
1799 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1800
1801 n = XNEW (struct ttypes_filter);
1802 n->t = list;
1803 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1804 *slot = n;
1805
1806 /* Generate a 0 terminated list of filter values. */
1807 for (; list ; list = TREE_CHAIN (list))
1808 {
1809 if (targetm.arm_eabi_unwinder)
1810 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1811 else
1812 {
1813 /* Look up each type in the list and encode its filter
1814 value as a uleb128. */
1815 push_uleb128 (&crtl->eh.ehspec_data,
1816 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1817 }
1818 }
1819 if (targetm.arm_eabi_unwinder)
1820 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1821 else
1822 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1823 }
1824
1825 return n->filter;
1826 }
1827
1828 /* Generate the action filter values to be used for CATCH and
1829 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1830 we use lots of landing pads, and so every type or list can share
1831 the same filter value, which saves table space. */
1832
1833 static void
1834 assign_filter_values (void)
1835 {
1836 int i;
1837 htab_t ttypes, ehspec;
1838
1839 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1840 if (targetm.arm_eabi_unwinder)
1841 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1842 else
1843 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1844
1845 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1846 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1847
1848 for (i = cfun->eh->last_region_number; i > 0; --i)
1849 {
1850 struct eh_region *r;
1851
1852 r = VEC_index (eh_region, cfun->eh->region_array, i);
1853
1854 /* Mind we don't process a region more than once. */
1855 if (!r || r->region_number != i)
1856 continue;
1857
1858 switch (r->type)
1859 {
1860 case ERT_CATCH:
1861 /* Whatever type_list is (NULL or true list), we build a list
1862 of filters for the region. */
1863 r->u.eh_catch.filter_list = NULL_TREE;
1864
1865 if (r->u.eh_catch.type_list != NULL)
1866 {
1867 /* Get a filter value for each of the types caught and store
1868 them in the region's dedicated list. */
1869 tree tp_node = r->u.eh_catch.type_list;
1870
1871 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1872 {
1873 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1874 tree flt_node = build_int_cst (NULL_TREE, flt);
1875
1876 r->u.eh_catch.filter_list
1877 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1878 }
1879 }
1880 else
1881 {
1882 /* Get a filter value for the NULL list also since it will need
1883 an action record anyway. */
1884 int flt = add_ttypes_entry (ttypes, NULL);
1885 tree flt_node = build_int_cst (NULL_TREE, flt);
1886
1887 r->u.eh_catch.filter_list
1888 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1889 }
1890
1891 break;
1892
1893 case ERT_ALLOWED_EXCEPTIONS:
1894 r->u.allowed.filter
1895 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1896 break;
1897
1898 default:
1899 break;
1900 }
1901 }
1902
1903 htab_delete (ttypes);
1904 htab_delete (ehspec);
1905 }
1906
1907 /* Emit SEQ into basic block just before INSN (that is assumed to be
1908 first instruction of some existing BB and return the newly
1909 produced block. */
1910 static basic_block
1911 emit_to_new_bb_before (rtx seq, rtx insn)
1912 {
1913 rtx last;
1914 basic_block bb;
1915 edge e;
1916 edge_iterator ei;
1917
1918 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1919 call), we don't want it to go into newly created landing pad or other EH
1920 construct. */
1921 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1922 if (e->flags & EDGE_FALLTHRU)
1923 force_nonfallthru (e);
1924 else
1925 ei_next (&ei);
1926 last = emit_insn_before (seq, insn);
1927 if (BARRIER_P (last))
1928 last = PREV_INSN (last);
1929 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1930 update_bb_for_insn (bb);
1931 bb->flags |= BB_SUPERBLOCK;
1932 return bb;
1933 }
1934
1935 /* Generate the code to actually handle exceptions, which will follow the
1936 landing pads. */
1937
1938 static void
1939 build_post_landing_pads (void)
1940 {
1941 int i;
1942
1943 for (i = cfun->eh->last_region_number; i > 0; --i)
1944 {
1945 struct eh_region *region;
1946 rtx seq;
1947
1948 region = VEC_index (eh_region, cfun->eh->region_array, i);
1949 /* Mind we don't process a region more than once. */
1950 if (!region || region->region_number != i)
1951 continue;
1952
1953 switch (region->type)
1954 {
1955 case ERT_TRY:
1956 /* It is possible that TRY region is kept alive only because some of
1957 contained catch region still have RESX instruction but they are
1958 reached via their copies. In this case we need to do nothing. */
1959 if (!region->u.eh_try.eh_catch->label)
1960 break;
1961
1962 /* ??? Collect the set of all non-overlapping catch handlers
1963 all the way up the chain until blocked by a cleanup. */
1964 /* ??? Outer try regions can share landing pads with inner
1965 try regions if the types are completely non-overlapping,
1966 and there are no intervening cleanups. */
1967
1968 region->post_landing_pad = gen_label_rtx ();
1969
1970 start_sequence ();
1971
1972 emit_label (region->post_landing_pad);
1973
1974 /* ??? It is mighty inconvenient to call back into the
1975 switch statement generation code in expand_end_case.
1976 Rapid prototyping sez a sequence of ifs. */
1977 {
1978 struct eh_region *c;
1979 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1980 {
1981 if (c->u.eh_catch.type_list == NULL)
1982 emit_jump (c->label);
1983 else
1984 {
1985 /* Need for one cmp/jump per type caught. Each type
1986 list entry has a matching entry in the filter list
1987 (see assign_filter_values). */
1988 tree tp_node = c->u.eh_catch.type_list;
1989 tree flt_node = c->u.eh_catch.filter_list;
1990
1991 for (; tp_node; )
1992 {
1993 emit_cmp_and_jump_insns
1994 (crtl->eh.filter,
1995 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1996 EQ, NULL_RTX,
1997 targetm.eh_return_filter_mode (), 0, c->label);
1998
1999 tp_node = TREE_CHAIN (tp_node);
2000 flt_node = TREE_CHAIN (flt_node);
2001 }
2002 }
2003 }
2004 }
2005
2006 /* We delay the generation of the _Unwind_Resume until we generate
2007 landing pads. We emit a marker here so as to get good control
2008 flow data in the meantime. */
2009 region->resume
2010 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2011 emit_barrier ();
2012
2013 seq = get_insns ();
2014 end_sequence ();
2015
2016 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2017
2018 break;
2019
2020 case ERT_ALLOWED_EXCEPTIONS:
2021 if (!region->label)
2022 break;
2023 region->post_landing_pad = gen_label_rtx ();
2024
2025 start_sequence ();
2026
2027 emit_label (region->post_landing_pad);
2028
2029 emit_cmp_and_jump_insns (crtl->eh.filter,
2030 GEN_INT (region->u.allowed.filter),
2031 EQ, NULL_RTX,
2032 targetm.eh_return_filter_mode (), 0, region->label);
2033
2034 /* We delay the generation of the _Unwind_Resume until we generate
2035 landing pads. We emit a marker here so as to get good control
2036 flow data in the meantime. */
2037 region->resume
2038 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2039 emit_barrier ();
2040
2041 seq = get_insns ();
2042 end_sequence ();
2043
2044 emit_to_new_bb_before (seq, region->label);
2045 break;
2046
2047 case ERT_CLEANUP:
2048 case ERT_MUST_NOT_THROW:
2049 region->post_landing_pad = region->label;
2050 break;
2051
2052 case ERT_CATCH:
2053 case ERT_THROW:
2054 /* Nothing to do. */
2055 break;
2056
2057 default:
2058 gcc_unreachable ();
2059 }
2060 }
2061 }
2062
2063 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2064 _Unwind_Resume otherwise. */
2065
2066 static void
2067 connect_post_landing_pads (void)
2068 {
2069 int i;
2070
2071 for (i = cfun->eh->last_region_number; i > 0; --i)
2072 {
2073 struct eh_region *region;
2074 struct eh_region *outer;
2075 rtx seq;
2076 rtx barrier;
2077
2078 region = VEC_index (eh_region, cfun->eh->region_array, i);
2079 /* Mind we don't process a region more than once. */
2080 if (!region || region->region_number != i)
2081 continue;
2082
2083 /* If there is no RESX, or it has been deleted by flow, there's
2084 nothing to fix up. */
2085 if (! region->resume || INSN_DELETED_P (region->resume))
2086 continue;
2087
2088 /* Search for another landing pad in this function. */
2089 for (outer = region->outer; outer ; outer = outer->outer)
2090 if (outer->post_landing_pad)
2091 break;
2092
2093 start_sequence ();
2094
2095 if (outer)
2096 {
2097 edge e;
2098 basic_block src, dest;
2099
2100 emit_jump (outer->post_landing_pad);
2101 src = BLOCK_FOR_INSN (region->resume);
2102 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2103 while (EDGE_COUNT (src->succs) > 0)
2104 remove_edge (EDGE_SUCC (src, 0));
2105 e = make_edge (src, dest, 0);
2106 e->probability = REG_BR_PROB_BASE;
2107 e->count = src->count;
2108 }
2109 else
2110 {
2111 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2112 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2113
2114 /* What we just emitted was a throwing libcall, so it got a
2115 barrier automatically added after it. If the last insn in
2116 the libcall sequence isn't the barrier, it's because the
2117 target emits multiple insns for a call, and there are insns
2118 after the actual call insn (which are redundant and would be
2119 optimized away). The barrier is inserted exactly after the
2120 call insn, so let's go get that and delete the insns after
2121 it, because below we need the barrier to be the last insn in
2122 the sequence. */
2123 delete_insns_since (NEXT_INSN (last_call_insn ()));
2124 }
2125
2126 seq = get_insns ();
2127 end_sequence ();
2128 barrier = emit_insn_before (seq, region->resume);
2129 /* Avoid duplicate barrier. */
2130 gcc_assert (BARRIER_P (barrier));
2131 delete_insn (barrier);
2132 delete_insn (region->resume);
2133
2134 /* ??? From tree-ssa we can wind up with catch regions whose
2135 label is not instantiated, but whose resx is present. Now
2136 that we've dealt with the resx, kill the region. */
2137 if (region->label == NULL && region->type == ERT_CLEANUP)
2138 remove_eh_handler (region);
2139 }
2140 }
2141
2142 \f
2143 static void
2144 dw2_build_landing_pads (void)
2145 {
2146 int i;
2147
2148 for (i = cfun->eh->last_region_number; i > 0; --i)
2149 {
2150 struct eh_region *region;
2151 rtx seq;
2152 basic_block bb;
2153 edge e;
2154
2155 region = VEC_index (eh_region, cfun->eh->region_array, i);
2156 /* Mind we don't process a region more than once. */
2157 if (!region || region->region_number != i)
2158 continue;
2159
2160 if (region->type != ERT_CLEANUP
2161 && region->type != ERT_TRY
2162 && region->type != ERT_ALLOWED_EXCEPTIONS)
2163 continue;
2164
2165 if (!region->post_landing_pad)
2166 continue;
2167
2168 start_sequence ();
2169
2170 region->landing_pad = gen_label_rtx ();
2171 emit_label (region->landing_pad);
2172
2173 #ifdef HAVE_exception_receiver
2174 if (HAVE_exception_receiver)
2175 emit_insn (gen_exception_receiver ());
2176 else
2177 #endif
2178 #ifdef HAVE_nonlocal_goto_receiver
2179 if (HAVE_nonlocal_goto_receiver)
2180 emit_insn (gen_nonlocal_goto_receiver ());
2181 else
2182 #endif
2183 { /* Nothing */ }
2184
2185 emit_move_insn (crtl->eh.exc_ptr,
2186 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2187 emit_move_insn (crtl->eh.filter,
2188 gen_rtx_REG (targetm.eh_return_filter_mode (),
2189 EH_RETURN_DATA_REGNO (1)));
2190
2191 seq = get_insns ();
2192 end_sequence ();
2193
2194 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2195 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2196 e->count = bb->count;
2197 e->probability = REG_BR_PROB_BASE;
2198 }
2199 }
2200
2201 \f
2202 struct sjlj_lp_info
2203 {
2204 int directly_reachable;
2205 int action_index;
2206 int dispatch_index;
2207 int call_site_index;
2208 };
2209
2210 static bool
2211 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2212 {
2213 rtx insn;
2214 bool found_one = false;
2215
2216 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2217 {
2218 struct eh_region *region;
2219 enum reachable_code rc;
2220 tree type_thrown;
2221 rtx note;
2222
2223 if (! INSN_P (insn))
2224 continue;
2225
2226 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2227 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2228 continue;
2229
2230 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2231 if (!region)
2232 continue;
2233
2234 type_thrown = NULL_TREE;
2235 if (region->type == ERT_THROW)
2236 {
2237 type_thrown = region->u.eh_throw.type;
2238 region = region->outer;
2239 }
2240
2241 /* Find the first containing region that might handle the exception.
2242 That's the landing pad to which we will transfer control. */
2243 rc = RNL_NOT_CAUGHT;
2244 for (; region; region = region->outer)
2245 {
2246 rc = reachable_next_level (region, type_thrown, NULL, false);
2247 if (rc != RNL_NOT_CAUGHT)
2248 break;
2249 }
2250 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2251 {
2252 lp_info[region->region_number].directly_reachable = 1;
2253 found_one = true;
2254 }
2255 }
2256
2257 return found_one;
2258 }
2259
2260 static void
2261 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2262 {
2263 htab_t ar_hash;
2264 int i, index;
2265
2266 /* First task: build the action table. */
2267
2268 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2269 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2270
2271 for (i = cfun->eh->last_region_number; i > 0; --i)
2272 if (lp_info[i].directly_reachable)
2273 {
2274 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2275
2276 r->landing_pad = dispatch_label;
2277 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2278 if (lp_info[i].action_index != -1)
2279 crtl->uses_eh_lsda = 1;
2280 }
2281
2282 htab_delete (ar_hash);
2283
2284 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2285 landing pad label for the region. For sjlj though, there is one
2286 common landing pad from which we dispatch to the post-landing pads.
2287
2288 A region receives a dispatch index if it is directly reachable
2289 and requires in-function processing. Regions that share post-landing
2290 pads may share dispatch indices. */
2291 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2292 (see build_post_landing_pads) so we don't bother checking for it. */
2293
2294 index = 0;
2295 for (i = cfun->eh->last_region_number; i > 0; --i)
2296 if (lp_info[i].directly_reachable)
2297 lp_info[i].dispatch_index = index++;
2298
2299 /* Finally: assign call-site values. If dwarf2 terms, this would be
2300 the region number assigned by convert_to_eh_region_ranges, but
2301 handles no-action and must-not-throw differently. */
2302
2303 call_site_base = 1;
2304 for (i = cfun->eh->last_region_number; i > 0; --i)
2305 if (lp_info[i].directly_reachable)
2306 {
2307 int action = lp_info[i].action_index;
2308
2309 /* Map must-not-throw to otherwise unused call-site index 0. */
2310 if (action == -2)
2311 index = 0;
2312 /* Map no-action to otherwise unused call-site index -1. */
2313 else if (action == -1)
2314 index = -1;
2315 /* Otherwise, look it up in the table. */
2316 else
2317 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2318
2319 lp_info[i].call_site_index = index;
2320 }
2321 }
2322
2323 static void
2324 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2325 {
2326 int last_call_site = -2;
2327 rtx insn, mem;
2328
2329 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2330 {
2331 struct eh_region *region;
2332 int this_call_site;
2333 rtx note, before, p;
2334
2335 /* Reset value tracking at extended basic block boundaries. */
2336 if (LABEL_P (insn))
2337 last_call_site = -2;
2338
2339 if (! INSN_P (insn))
2340 continue;
2341
2342 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2343
2344 /* Calls that are known to not throw need not be marked. */
2345 if (note && INTVAL (XEXP (note, 0)) <= 0)
2346 continue;
2347
2348 if (note)
2349 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2350 else
2351 region = NULL;
2352
2353 if (!region)
2354 {
2355 /* Calls (and trapping insns) without notes are outside any
2356 exception handling region in this function. Mark them as
2357 no action. */
2358 if (CALL_P (insn)
2359 || (flag_non_call_exceptions
2360 && may_trap_p (PATTERN (insn))))
2361 this_call_site = -1;
2362 else
2363 continue;
2364 }
2365 else
2366 this_call_site = lp_info[region->region_number].call_site_index;
2367
2368 if (this_call_site == last_call_site)
2369 continue;
2370
2371 /* Don't separate a call from it's argument loads. */
2372 before = insn;
2373 if (CALL_P (insn))
2374 before = find_first_parameter_load (insn, NULL_RTX);
2375
2376 start_sequence ();
2377 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2378 sjlj_fc_call_site_ofs);
2379 emit_move_insn (mem, GEN_INT (this_call_site));
2380 p = get_insns ();
2381 end_sequence ();
2382
2383 emit_insn_before (p, before);
2384 last_call_site = this_call_site;
2385 }
2386 }
2387
2388 /* Construct the SjLj_Function_Context. */
2389
2390 static void
2391 sjlj_emit_function_enter (rtx dispatch_label)
2392 {
2393 rtx fn_begin, fc, mem, seq;
2394 bool fn_begin_outside_block;
2395
2396 fc = crtl->eh.sjlj_fc;
2397
2398 start_sequence ();
2399
2400 /* We're storing this libcall's address into memory instead of
2401 calling it directly. Thus, we must call assemble_external_libcall
2402 here, as we can not depend on emit_library_call to do it for us. */
2403 assemble_external_libcall (eh_personality_libfunc);
2404 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2405 emit_move_insn (mem, eh_personality_libfunc);
2406
2407 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2408 if (crtl->uses_eh_lsda)
2409 {
2410 char buf[20];
2411 rtx sym;
2412
2413 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2414 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2415 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2416 emit_move_insn (mem, sym);
2417 }
2418 else
2419 emit_move_insn (mem, const0_rtx);
2420
2421 #ifdef DONT_USE_BUILTIN_SETJMP
2422 {
2423 rtx x;
2424 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2425 TYPE_MODE (integer_type_node), 1,
2426 plus_constant (XEXP (fc, 0),
2427 sjlj_fc_jbuf_ofs), Pmode);
2428
2429 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2430 TYPE_MODE (integer_type_node), 0, dispatch_label);
2431 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2432 }
2433 #else
2434 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2435 dispatch_label);
2436 #endif
2437
2438 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2439 1, XEXP (fc, 0), Pmode);
2440
2441 seq = get_insns ();
2442 end_sequence ();
2443
2444 /* ??? Instead of doing this at the beginning of the function,
2445 do this in a block that is at loop level 0 and dominates all
2446 can_throw_internal instructions. */
2447
2448 fn_begin_outside_block = true;
2449 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2450 if (NOTE_P (fn_begin))
2451 {
2452 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2453 break;
2454 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2455 fn_begin_outside_block = false;
2456 }
2457
2458 if (fn_begin_outside_block)
2459 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2460 else
2461 emit_insn_after (seq, fn_begin);
2462 }
2463
2464 /* Call back from expand_function_end to know where we should put
2465 the call to unwind_sjlj_unregister_libfunc if needed. */
2466
2467 void
2468 sjlj_emit_function_exit_after (rtx after)
2469 {
2470 crtl->eh.sjlj_exit_after = after;
2471 }
2472
2473 static void
2474 sjlj_emit_function_exit (void)
2475 {
2476 rtx seq, insn;
2477
2478 start_sequence ();
2479
2480 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2481 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2482
2483 seq = get_insns ();
2484 end_sequence ();
2485
2486 /* ??? Really this can be done in any block at loop level 0 that
2487 post-dominates all can_throw_internal instructions. This is
2488 the last possible moment. */
2489
2490 insn = crtl->eh.sjlj_exit_after;
2491 if (LABEL_P (insn))
2492 insn = NEXT_INSN (insn);
2493
2494 emit_insn_after (seq, insn);
2495 }
2496
2497 static void
2498 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2499 {
2500 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2501 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2502 int i, first_reachable;
2503 rtx mem, dispatch, seq, fc;
2504 rtx before;
2505 basic_block bb;
2506 edge e;
2507
2508 fc = crtl->eh.sjlj_fc;
2509
2510 start_sequence ();
2511
2512 emit_label (dispatch_label);
2513
2514 #ifndef DONT_USE_BUILTIN_SETJMP
2515 expand_builtin_setjmp_receiver (dispatch_label);
2516 #endif
2517
2518 /* Load up dispatch index, exc_ptr and filter values from the
2519 function context. */
2520 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2521 sjlj_fc_call_site_ofs);
2522 dispatch = copy_to_reg (mem);
2523
2524 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2525 if (unwind_word_mode != ptr_mode)
2526 {
2527 #ifdef POINTERS_EXTEND_UNSIGNED
2528 mem = convert_memory_address (ptr_mode, mem);
2529 #else
2530 mem = convert_to_mode (ptr_mode, mem, 0);
2531 #endif
2532 }
2533 emit_move_insn (crtl->eh.exc_ptr, mem);
2534
2535 mem = adjust_address (fc, unwind_word_mode,
2536 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2537 if (unwind_word_mode != filter_mode)
2538 mem = convert_to_mode (filter_mode, mem, 0);
2539 emit_move_insn (crtl->eh.filter, mem);
2540
2541 /* Jump to one of the directly reachable regions. */
2542 /* ??? This really ought to be using a switch statement. */
2543
2544 first_reachable = 0;
2545 for (i = cfun->eh->last_region_number; i > 0; --i)
2546 {
2547 if (! lp_info[i].directly_reachable)
2548 continue;
2549
2550 if (! first_reachable)
2551 {
2552 first_reachable = i;
2553 continue;
2554 }
2555
2556 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2557 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2558 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2559 ->post_landing_pad);
2560 }
2561
2562 seq = get_insns ();
2563 end_sequence ();
2564
2565 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2566 ->post_landing_pad);
2567
2568 bb = emit_to_new_bb_before (seq, before);
2569 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2570 e->count = bb->count;
2571 e->probability = REG_BR_PROB_BASE;
2572 }
2573
2574 static void
2575 sjlj_build_landing_pads (void)
2576 {
2577 struct sjlj_lp_info *lp_info;
2578
2579 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2580
2581 if (sjlj_find_directly_reachable_regions (lp_info))
2582 {
2583 rtx dispatch_label = gen_label_rtx ();
2584 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2585 TYPE_MODE (sjlj_fc_type_node),
2586 TYPE_ALIGN (sjlj_fc_type_node));
2587 crtl->eh.sjlj_fc
2588 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2589 int_size_in_bytes (sjlj_fc_type_node),
2590 align);
2591
2592 sjlj_assign_call_site_values (dispatch_label, lp_info);
2593 sjlj_mark_call_sites (lp_info);
2594
2595 sjlj_emit_function_enter (dispatch_label);
2596 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2597 sjlj_emit_function_exit ();
2598 }
2599
2600 free (lp_info);
2601 }
2602
2603 /* After initial rtl generation, call back to finish generating
2604 exception support code. */
2605
2606 static void
2607 finish_eh_generation (void)
2608 {
2609 basic_block bb;
2610
2611 /* Nothing to do if no regions created. */
2612 if (cfun->eh->region_tree == NULL)
2613 return;
2614
2615 /* The object here is to provide detailed information (via
2616 reachable_handlers) on how exception control flows within the
2617 function for the CFG construction. In this first pass, we can
2618 include type information garnered from ERT_THROW and
2619 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2620 in deleting unreachable handlers. Subsequently, we will generate
2621 landing pads which will connect many of the handlers, and then
2622 type information will not be effective. Still, this is a win
2623 over previous implementations. */
2624
2625 /* These registers are used by the landing pads. Make sure they
2626 have been generated. */
2627 get_exception_pointer ();
2628 get_exception_filter ();
2629
2630 /* Construct the landing pads. */
2631
2632 assign_filter_values ();
2633 build_post_landing_pads ();
2634 connect_post_landing_pads ();
2635 if (USING_SJLJ_EXCEPTIONS)
2636 sjlj_build_landing_pads ();
2637 else
2638 dw2_build_landing_pads ();
2639
2640 crtl->eh.built_landing_pads = 1;
2641
2642 /* We've totally changed the CFG. Start over. */
2643 find_exception_handler_labels ();
2644 break_superblocks ();
2645 if (USING_SJLJ_EXCEPTIONS
2646 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2647 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2648 commit_edge_insertions ();
2649 FOR_EACH_BB (bb)
2650 {
2651 edge e;
2652 edge_iterator ei;
2653 bool eh = false;
2654 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2655 {
2656 if (e->flags & EDGE_EH)
2657 {
2658 remove_edge (e);
2659 eh = true;
2660 }
2661 else
2662 ei_next (&ei);
2663 }
2664 if (eh)
2665 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2666 }
2667 }
2668 \f
2669 /* This section handles removing dead code for flow. */
2670
2671 /* Splice REGION from the region tree and replace it by REPLACE etc.
2672 When UPDATE_CATCH_TRY is true mind updating links from catch to try
2673 region.*/
2674
2675 static void
2676 remove_eh_handler_and_replace (struct eh_region *region,
2677 struct eh_region *replace,
2678 bool update_catch_try)
2679 {
2680 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2681 rtx lab;
2682
2683 outer = region->outer;
2684
2685 /* For the benefit of efficiently handling REG_EH_REGION notes,
2686 replace this region in the region array with its containing
2687 region. Note that previous region deletions may result in
2688 multiple copies of this region in the array, so we have a
2689 list of alternate numbers by which we are known. */
2690
2691 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2692 replace);
2693 if (region->aka)
2694 {
2695 unsigned i;
2696 bitmap_iterator bi;
2697
2698 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2699 {
2700 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2701 }
2702 }
2703
2704 if (replace)
2705 {
2706 if (!replace->aka)
2707 replace->aka = BITMAP_GGC_ALLOC ();
2708 if (region->aka)
2709 bitmap_ior_into (replace->aka, region->aka);
2710 bitmap_set_bit (replace->aka, region->region_number);
2711 }
2712
2713 if (crtl->eh.built_landing_pads)
2714 lab = region->landing_pad;
2715 else
2716 lab = region->label;
2717 if (outer)
2718 pp_start = &outer->inner;
2719 else
2720 pp_start = &cfun->eh->region_tree;
2721 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2722 continue;
2723 *pp = region->next_peer;
2724
2725 if (replace)
2726 pp_start = &replace->inner;
2727 else
2728 pp_start = &cfun->eh->region_tree;
2729 inner = region->inner;
2730 if (inner)
2731 {
2732 for (p = inner; p->next_peer ; p = p->next_peer)
2733 p->outer = replace;
2734 p->outer = replace;
2735
2736 p->next_peer = *pp_start;
2737 *pp_start = inner;
2738 }
2739
2740 if (region->type == ERT_CATCH
2741 && update_catch_try)
2742 {
2743 struct eh_region *eh_try, *next, *prev;
2744
2745 for (eh_try = region->next_peer;
2746 eh_try->type == ERT_CATCH;
2747 eh_try = eh_try->next_peer)
2748 continue;
2749 gcc_assert (eh_try->type == ERT_TRY);
2750
2751 next = region->u.eh_catch.next_catch;
2752 prev = region->u.eh_catch.prev_catch;
2753
2754 if (next)
2755 next->u.eh_catch.prev_catch = prev;
2756 else
2757 eh_try->u.eh_try.last_catch = prev;
2758 if (prev)
2759 prev->u.eh_catch.next_catch = next;
2760 else
2761 {
2762 eh_try->u.eh_try.eh_catch = next;
2763 if (! next)
2764 remove_eh_handler (eh_try);
2765 }
2766 }
2767 }
2768
2769 /* Splice REGION from the region tree and replace it by the outer region
2770 etc. */
2771
2772 static void
2773 remove_eh_handler (struct eh_region *region)
2774 {
2775 remove_eh_handler_and_replace (region, region->outer, true);
2776 }
2777
2778 /* Remove Eh region R that has turned out to have no code in its handler. */
2779
2780 void
2781 remove_eh_region (int r)
2782 {
2783 struct eh_region *region;
2784
2785 region = VEC_index (eh_region, cfun->eh->region_array, r);
2786 remove_eh_handler (region);
2787 }
2788
2789 /* Remove Eh region R that has turned out to have no code in its handler
2790 and replace in by R2. */
2791
2792 void
2793 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2794 {
2795 struct eh_region *region, *region2;
2796
2797 region = VEC_index (eh_region, cfun->eh->region_array, r);
2798 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2799 remove_eh_handler_and_replace (region, region2->outer, true);
2800 }
2801
2802 /* Invokes CALLBACK for every exception handler label. Only used by old
2803 loop hackery; should not be used by new code. */
2804
2805 void
2806 for_each_eh_label (void (*callback) (rtx))
2807 {
2808 int i;
2809 for (i = 0; i < cfun->eh->last_region_number; i++)
2810 {
2811 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2812 if (r && r->region_number == i && r->label
2813 && GET_CODE (r->label) == CODE_LABEL)
2814 (*callback) (r->label);
2815 }
2816 }
2817
2818 /* Invoke CALLBACK for every exception region in the current function. */
2819
2820 void
2821 for_each_eh_region (void (*callback) (struct eh_region *))
2822 {
2823 int i, n = cfun->eh->last_region_number;
2824 for (i = 1; i <= n; ++i)
2825 {
2826 struct eh_region *region;
2827
2828 region = VEC_index (eh_region, cfun->eh->region_array, i);
2829 if (region)
2830 (*callback) (region);
2831 }
2832 }
2833 \f
2834 /* This section describes CFG exception edges for flow. */
2835
2836 /* For communicating between calls to reachable_next_level. */
2837 struct reachable_info
2838 {
2839 tree types_caught;
2840 tree types_allowed;
2841 void (*callback) (struct eh_region *, void *);
2842 void *callback_data;
2843 };
2844
2845 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2846 base class of TYPE, is in HANDLED. */
2847
2848 static int
2849 check_handled (tree handled, tree type)
2850 {
2851 tree t;
2852
2853 /* We can check for exact matches without front-end help. */
2854 if (! lang_eh_type_covers)
2855 {
2856 for (t = handled; t ; t = TREE_CHAIN (t))
2857 if (TREE_VALUE (t) == type)
2858 return 1;
2859 }
2860 else
2861 {
2862 for (t = handled; t ; t = TREE_CHAIN (t))
2863 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2864 return 1;
2865 }
2866
2867 return 0;
2868 }
2869
2870 /* A subroutine of reachable_next_level. If we are collecting a list
2871 of handlers, add one. After landing pad generation, reference
2872 it instead of the handlers themselves. Further, the handlers are
2873 all wired together, so by referencing one, we've got them all.
2874 Before landing pad generation we reference each handler individually.
2875
2876 LP_REGION contains the landing pad; REGION is the handler. */
2877
2878 static void
2879 add_reachable_handler (struct reachable_info *info,
2880 struct eh_region *lp_region, struct eh_region *region)
2881 {
2882 if (! info)
2883 return;
2884
2885 if (crtl->eh.built_landing_pads)
2886 info->callback (lp_region, info->callback_data);
2887 else
2888 info->callback (region, info->callback_data);
2889 }
2890
2891 /* Process one level of exception regions for reachability.
2892 If TYPE_THROWN is non-null, then it is the *exact* type being
2893 propagated. If INFO is non-null, then collect handler labels
2894 and caught/allowed type information between invocations. */
2895
2896 static enum reachable_code
2897 reachable_next_level (struct eh_region *region, tree type_thrown,
2898 struct reachable_info *info,
2899 bool maybe_resx)
2900 {
2901 switch (region->type)
2902 {
2903 case ERT_CLEANUP:
2904 /* Before landing-pad generation, we model control flow
2905 directly to the individual handlers. In this way we can
2906 see that catch handler types may shadow one another. */
2907 add_reachable_handler (info, region, region);
2908 return RNL_MAYBE_CAUGHT;
2909
2910 case ERT_TRY:
2911 {
2912 struct eh_region *c;
2913 enum reachable_code ret = RNL_NOT_CAUGHT;
2914
2915 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2916 {
2917 /* A catch-all handler ends the search. */
2918 if (c->u.eh_catch.type_list == NULL)
2919 {
2920 add_reachable_handler (info, region, c);
2921 return RNL_CAUGHT;
2922 }
2923
2924 if (type_thrown)
2925 {
2926 /* If we have at least one type match, end the search. */
2927 tree tp_node = c->u.eh_catch.type_list;
2928
2929 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2930 {
2931 tree type = TREE_VALUE (tp_node);
2932
2933 if (type == type_thrown
2934 || (lang_eh_type_covers
2935 && (*lang_eh_type_covers) (type, type_thrown)))
2936 {
2937 add_reachable_handler (info, region, c);
2938 return RNL_CAUGHT;
2939 }
2940 }
2941
2942 /* If we have definitive information of a match failure,
2943 the catch won't trigger. */
2944 if (lang_eh_type_covers)
2945 return RNL_NOT_CAUGHT;
2946 }
2947
2948 /* At this point, we either don't know what type is thrown or
2949 don't have front-end assistance to help deciding if it is
2950 covered by one of the types in the list for this region.
2951
2952 We'd then like to add this region to the list of reachable
2953 handlers since it is indeed potentially reachable based on the
2954 information we have.
2955
2956 Actually, this handler is for sure not reachable if all the
2957 types it matches have already been caught. That is, it is only
2958 potentially reachable if at least one of the types it catches
2959 has not been previously caught. */
2960
2961 if (! info)
2962 ret = RNL_MAYBE_CAUGHT;
2963 else
2964 {
2965 tree tp_node = c->u.eh_catch.type_list;
2966 bool maybe_reachable = false;
2967
2968 /* Compute the potential reachability of this handler and
2969 update the list of types caught at the same time. */
2970 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2971 {
2972 tree type = TREE_VALUE (tp_node);
2973
2974 if (! check_handled (info->types_caught, type))
2975 {
2976 info->types_caught
2977 = tree_cons (NULL, type, info->types_caught);
2978
2979 maybe_reachable = true;
2980 }
2981 }
2982
2983 if (maybe_reachable)
2984 {
2985 add_reachable_handler (info, region, c);
2986
2987 /* ??? If the catch type is a base class of every allowed
2988 type, then we know we can stop the search. */
2989 ret = RNL_MAYBE_CAUGHT;
2990 }
2991 }
2992 }
2993
2994 return ret;
2995 }
2996
2997 case ERT_ALLOWED_EXCEPTIONS:
2998 /* An empty list of types definitely ends the search. */
2999 if (region->u.allowed.type_list == NULL_TREE)
3000 {
3001 add_reachable_handler (info, region, region);
3002 return RNL_CAUGHT;
3003 }
3004
3005 /* Collect a list of lists of allowed types for use in detecting
3006 when a catch may be transformed into a catch-all. */
3007 if (info)
3008 info->types_allowed = tree_cons (NULL_TREE,
3009 region->u.allowed.type_list,
3010 info->types_allowed);
3011
3012 /* If we have definitive information about the type hierarchy,
3013 then we can tell if the thrown type will pass through the
3014 filter. */
3015 if (type_thrown && lang_eh_type_covers)
3016 {
3017 if (check_handled (region->u.allowed.type_list, type_thrown))
3018 return RNL_NOT_CAUGHT;
3019 else
3020 {
3021 add_reachable_handler (info, region, region);
3022 return RNL_CAUGHT;
3023 }
3024 }
3025
3026 add_reachable_handler (info, region, region);
3027 return RNL_MAYBE_CAUGHT;
3028
3029 case ERT_CATCH:
3030 /* Catch regions are handled by their controlling try region. */
3031 return RNL_NOT_CAUGHT;
3032
3033 case ERT_MUST_NOT_THROW:
3034 /* Here we end our search, since no exceptions may propagate.
3035
3036 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
3037 only via locally handled RESX instructions.
3038
3039 When we inline a function call, we can bring in new handlers. In order
3040 to avoid ERT_MUST_NOT_THROW landing pads from being deleted as unreachable
3041 assume that such handlers exists prior for any inlinable call prior
3042 inlining decisions are fixed. */
3043
3044 if (maybe_resx)
3045 {
3046 add_reachable_handler (info, region, region);
3047 return RNL_CAUGHT;
3048 }
3049 else
3050 return RNL_BLOCKED;
3051
3052 case ERT_THROW:
3053 case ERT_UNKNOWN:
3054 /* Shouldn't see these here. */
3055 gcc_unreachable ();
3056 break;
3057 default:
3058 gcc_unreachable ();
3059 }
3060 }
3061
3062 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3063
3064 void
3065 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3066 void (*callback) (struct eh_region *, void *),
3067 void *callback_data)
3068 {
3069 struct reachable_info info;
3070 struct eh_region *region;
3071 tree type_thrown;
3072
3073 memset (&info, 0, sizeof (info));
3074 info.callback = callback;
3075 info.callback_data = callback_data;
3076
3077 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3078 if (!region)
3079 return;
3080
3081 type_thrown = NULL_TREE;
3082 if (is_resx)
3083 {
3084 /* A RESX leaves a region instead of entering it. Thus the
3085 region itself may have been deleted out from under us. */
3086 if (region == NULL)
3087 return;
3088 region = region->outer;
3089 }
3090 else if (region->type == ERT_THROW)
3091 {
3092 type_thrown = region->u.eh_throw.type;
3093 region = region->outer;
3094 }
3095
3096 while (region)
3097 {
3098 if (reachable_next_level (region, type_thrown, &info,
3099 inlinable_call || is_resx) >= RNL_CAUGHT)
3100 break;
3101 /* If we have processed one cleanup, there is no point in
3102 processing any more of them. Each cleanup will have an edge
3103 to the next outer cleanup region, so the flow graph will be
3104 accurate. */
3105 if (region->type == ERT_CLEANUP)
3106 {
3107 enum reachable_code code = RNL_NOT_CAUGHT;
3108 region = find_prev_try (region->outer);
3109 /* Continue looking for outer TRY region until we find one
3110 that might cath something. */
3111 while (region
3112 && (code = reachable_next_level (region, type_thrown, &info,
3113 inlinable_call || is_resx))
3114 == RNL_NOT_CAUGHT)
3115 region = find_prev_try (region->outer);
3116 if (code >= RNL_CAUGHT)
3117 break;
3118 }
3119 if (region)
3120 region = region->outer;
3121 }
3122 }
3123
3124 /* Retrieve a list of labels of exception handlers which can be
3125 reached by a given insn. */
3126
3127 static void
3128 arh_to_landing_pad (struct eh_region *region, void *data)
3129 {
3130 rtx *p_handlers = (rtx *) data;
3131 if (! *p_handlers)
3132 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3133 }
3134
3135 static void
3136 arh_to_label (struct eh_region *region, void *data)
3137 {
3138 rtx *p_handlers = (rtx *) data;
3139 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3140 }
3141
3142 rtx
3143 reachable_handlers (rtx insn)
3144 {
3145 bool is_resx = false;
3146 rtx handlers = NULL;
3147 int region_number;
3148
3149 if (JUMP_P (insn)
3150 && GET_CODE (PATTERN (insn)) == RESX)
3151 {
3152 region_number = XINT (PATTERN (insn), 0);
3153 is_resx = true;
3154 }
3155 else
3156 {
3157 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3158 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3159 return NULL;
3160 region_number = INTVAL (XEXP (note, 0));
3161 }
3162
3163 foreach_reachable_handler (region_number, is_resx, false,
3164 (crtl->eh.built_landing_pads
3165 ? arh_to_landing_pad
3166 : arh_to_label),
3167 &handlers);
3168
3169 return handlers;
3170 }
3171
3172 /* Determine if the given INSN can throw an exception that is caught
3173 within the function. */
3174
3175 bool
3176 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3177 {
3178 struct eh_region *region;
3179 tree type_thrown;
3180
3181 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3182 if (!region)
3183 return false;
3184
3185 type_thrown = NULL_TREE;
3186 if (is_resx)
3187 region = region->outer;
3188 else if (region->type == ERT_THROW)
3189 {
3190 type_thrown = region->u.eh_throw.type;
3191 region = region->outer;
3192 }
3193
3194 /* If this exception is ignored by each and every containing region,
3195 then control passes straight out. The runtime may handle some
3196 regions, which also do not require processing internally. */
3197 for (; region; region = region->outer)
3198 {
3199 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3200 inlinable_call || is_resx);
3201 if (how == RNL_BLOCKED)
3202 return false;
3203 if (how != RNL_NOT_CAUGHT)
3204 return true;
3205 }
3206
3207 return false;
3208 }
3209
3210 bool
3211 can_throw_internal (const_rtx insn)
3212 {
3213 rtx note;
3214
3215 if (! INSN_P (insn))
3216 return false;
3217
3218 if (JUMP_P (insn)
3219 && GET_CODE (PATTERN (insn)) == RESX
3220 && XINT (PATTERN (insn), 0) > 0)
3221 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3222
3223 if (NONJUMP_INSN_P (insn)
3224 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3225 insn = XVECEXP (PATTERN (insn), 0, 0);
3226
3227 /* Every insn that might throw has an EH_REGION note. */
3228 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3229 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3230 return false;
3231
3232 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3233 }
3234
3235 /* Determine if the given INSN can throw an exception that is
3236 visible outside the function. */
3237
3238 bool
3239 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3240 {
3241 struct eh_region *region;
3242 tree type_thrown;
3243
3244 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3245 if (!region)
3246 return true;
3247
3248 type_thrown = NULL_TREE;
3249 if (is_resx)
3250 region = region->outer;
3251 else if (region->type == ERT_THROW)
3252 {
3253 type_thrown = region->u.eh_throw.type;
3254 region = region->outer;
3255 }
3256
3257 /* If the exception is caught or blocked by any containing region,
3258 then it is not seen by any calling function. */
3259 for (; region ; region = region->outer)
3260 if (reachable_next_level (region, type_thrown, NULL,
3261 inlinable_call || is_resx) >= RNL_CAUGHT)
3262 return false;
3263
3264 return true;
3265 }
3266
3267 bool
3268 can_throw_external (const_rtx insn)
3269 {
3270 rtx note;
3271
3272 if (! INSN_P (insn))
3273 return false;
3274
3275 if (JUMP_P (insn)
3276 && GET_CODE (PATTERN (insn)) == RESX
3277 && XINT (PATTERN (insn), 0) > 0)
3278 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3279
3280 if (NONJUMP_INSN_P (insn)
3281 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3282 {
3283 rtx seq = PATTERN (insn);
3284 int i, n = XVECLEN (seq, 0);
3285
3286 for (i = 0; i < n; i++)
3287 if (can_throw_external (XVECEXP (seq, 0, i)))
3288 return true;
3289
3290 return false;
3291 }
3292
3293 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3294 if (!note)
3295 {
3296 /* Calls (and trapping insns) without notes are outside any
3297 exception handling region in this function. We have to
3298 assume it might throw. Given that the front end and middle
3299 ends mark known NOTHROW functions, this isn't so wildly
3300 inaccurate. */
3301 return (CALL_P (insn)
3302 || (flag_non_call_exceptions
3303 && may_trap_p (PATTERN (insn))));
3304 }
3305 if (INTVAL (XEXP (note, 0)) <= 0)
3306 return false;
3307
3308 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3309 }
3310
3311 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3312
3313 unsigned int
3314 set_nothrow_function_flags (void)
3315 {
3316 rtx insn;
3317
3318 crtl->nothrow = 1;
3319
3320 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3321 something that can throw an exception. We specifically exempt
3322 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3323 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3324 is optimistic. */
3325
3326 crtl->all_throwers_are_sibcalls = 1;
3327
3328 /* If we don't know that this implementation of the function will
3329 actually be used, then we must not set TREE_NOTHROW, since
3330 callers must not assume that this function does not throw. */
3331 if (TREE_NOTHROW (current_function_decl))
3332 return 0;
3333
3334 if (! flag_exceptions)
3335 return 0;
3336
3337 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3338 if (can_throw_external (insn))
3339 {
3340 crtl->nothrow = 0;
3341
3342 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3343 {
3344 crtl->all_throwers_are_sibcalls = 0;
3345 return 0;
3346 }
3347 }
3348
3349 for (insn = crtl->epilogue_delay_list; insn;
3350 insn = XEXP (insn, 1))
3351 if (can_throw_external (insn))
3352 {
3353 crtl->nothrow = 0;
3354
3355 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3356 {
3357 crtl->all_throwers_are_sibcalls = 0;
3358 return 0;
3359 }
3360 }
3361 if (crtl->nothrow
3362 && (cgraph_function_body_availability (cgraph_node
3363 (current_function_decl))
3364 >= AVAIL_AVAILABLE))
3365 {
3366 struct cgraph_node *node = cgraph_node (current_function_decl);
3367 struct cgraph_edge *e;
3368 for (e = node->callers; e; e = e->next_caller)
3369 e->can_throw_external = false;
3370 TREE_NOTHROW (current_function_decl) = 1;
3371
3372 if (dump_file)
3373 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3374 current_function_name ());
3375 }
3376 return 0;
3377 }
3378
3379 struct rtl_opt_pass pass_set_nothrow_function_flags =
3380 {
3381 {
3382 RTL_PASS,
3383 "nothrow", /* name */
3384 NULL, /* gate */
3385 set_nothrow_function_flags, /* execute */
3386 NULL, /* sub */
3387 NULL, /* next */
3388 0, /* static_pass_number */
3389 TV_NONE, /* tv_id */
3390 0, /* properties_required */
3391 0, /* properties_provided */
3392 0, /* properties_destroyed */
3393 0, /* todo_flags_start */
3394 TODO_dump_func, /* todo_flags_finish */
3395 }
3396 };
3397
3398 \f
3399 /* Various hooks for unwind library. */
3400
3401 /* Do any necessary initialization to access arbitrary stack frames.
3402 On the SPARC, this means flushing the register windows. */
3403
3404 void
3405 expand_builtin_unwind_init (void)
3406 {
3407 /* Set this so all the registers get saved in our frame; we need to be
3408 able to copy the saved values for any registers from frames we unwind. */
3409 crtl->saves_all_registers = 1;
3410
3411 #ifdef SETUP_FRAME_ADDRESSES
3412 SETUP_FRAME_ADDRESSES ();
3413 #endif
3414 }
3415
3416 rtx
3417 expand_builtin_eh_return_data_regno (tree exp)
3418 {
3419 tree which = CALL_EXPR_ARG (exp, 0);
3420 unsigned HOST_WIDE_INT iwhich;
3421
3422 if (TREE_CODE (which) != INTEGER_CST)
3423 {
3424 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3425 return constm1_rtx;
3426 }
3427
3428 iwhich = tree_low_cst (which, 1);
3429 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3430 if (iwhich == INVALID_REGNUM)
3431 return constm1_rtx;
3432
3433 #ifdef DWARF_FRAME_REGNUM
3434 iwhich = DWARF_FRAME_REGNUM (iwhich);
3435 #else
3436 iwhich = DBX_REGISTER_NUMBER (iwhich);
3437 #endif
3438
3439 return GEN_INT (iwhich);
3440 }
3441
3442 /* Given a value extracted from the return address register or stack slot,
3443 return the actual address encoded in that value. */
3444
3445 rtx
3446 expand_builtin_extract_return_addr (tree addr_tree)
3447 {
3448 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3449
3450 if (GET_MODE (addr) != Pmode
3451 && GET_MODE (addr) != VOIDmode)
3452 {
3453 #ifdef POINTERS_EXTEND_UNSIGNED
3454 addr = convert_memory_address (Pmode, addr);
3455 #else
3456 addr = convert_to_mode (Pmode, addr, 0);
3457 #endif
3458 }
3459
3460 /* First mask out any unwanted bits. */
3461 #ifdef MASK_RETURN_ADDR
3462 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3463 #endif
3464
3465 /* Then adjust to find the real return address. */
3466 #if defined (RETURN_ADDR_OFFSET)
3467 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3468 #endif
3469
3470 return addr;
3471 }
3472
3473 /* Given an actual address in addr_tree, do any necessary encoding
3474 and return the value to be stored in the return address register or
3475 stack slot so the epilogue will return to that address. */
3476
3477 rtx
3478 expand_builtin_frob_return_addr (tree addr_tree)
3479 {
3480 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3481
3482 addr = convert_memory_address (Pmode, addr);
3483
3484 #ifdef RETURN_ADDR_OFFSET
3485 addr = force_reg (Pmode, addr);
3486 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3487 #endif
3488
3489 return addr;
3490 }
3491
3492 /* Set up the epilogue with the magic bits we'll need to return to the
3493 exception handler. */
3494
3495 void
3496 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3497 tree handler_tree)
3498 {
3499 rtx tmp;
3500
3501 #ifdef EH_RETURN_STACKADJ_RTX
3502 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3503 VOIDmode, EXPAND_NORMAL);
3504 tmp = convert_memory_address (Pmode, tmp);
3505 if (!crtl->eh.ehr_stackadj)
3506 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3507 else if (tmp != crtl->eh.ehr_stackadj)
3508 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3509 #endif
3510
3511 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3512 VOIDmode, EXPAND_NORMAL);
3513 tmp = convert_memory_address (Pmode, tmp);
3514 if (!crtl->eh.ehr_handler)
3515 crtl->eh.ehr_handler = copy_to_reg (tmp);
3516 else if (tmp != crtl->eh.ehr_handler)
3517 emit_move_insn (crtl->eh.ehr_handler, tmp);
3518
3519 if (!crtl->eh.ehr_label)
3520 crtl->eh.ehr_label = gen_label_rtx ();
3521 emit_jump (crtl->eh.ehr_label);
3522 }
3523
3524 void
3525 expand_eh_return (void)
3526 {
3527 rtx around_label;
3528
3529 if (! crtl->eh.ehr_label)
3530 return;
3531
3532 crtl->calls_eh_return = 1;
3533
3534 #ifdef EH_RETURN_STACKADJ_RTX
3535 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3536 #endif
3537
3538 around_label = gen_label_rtx ();
3539 emit_jump (around_label);
3540
3541 emit_label (crtl->eh.ehr_label);
3542 clobber_return_register ();
3543
3544 #ifdef EH_RETURN_STACKADJ_RTX
3545 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3546 #endif
3547
3548 #ifdef HAVE_eh_return
3549 if (HAVE_eh_return)
3550 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3551 else
3552 #endif
3553 {
3554 #ifdef EH_RETURN_HANDLER_RTX
3555 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3556 #else
3557 error ("__builtin_eh_return not supported on this target");
3558 #endif
3559 }
3560
3561 emit_label (around_label);
3562 }
3563
3564 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3565 POINTERS_EXTEND_UNSIGNED and return it. */
3566
3567 rtx
3568 expand_builtin_extend_pointer (tree addr_tree)
3569 {
3570 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3571 int extend;
3572
3573 #ifdef POINTERS_EXTEND_UNSIGNED
3574 extend = POINTERS_EXTEND_UNSIGNED;
3575 #else
3576 /* The previous EH code did an unsigned extend by default, so we do this also
3577 for consistency. */
3578 extend = 1;
3579 #endif
3580
3581 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3582 }
3583 \f
3584 /* In the following functions, we represent entries in the action table
3585 as 1-based indices. Special cases are:
3586
3587 0: null action record, non-null landing pad; implies cleanups
3588 -1: null action record, null landing pad; implies no action
3589 -2: no call-site entry; implies must_not_throw
3590 -3: we have yet to process outer regions
3591
3592 Further, no special cases apply to the "next" field of the record.
3593 For next, 0 means end of list. */
3594
3595 struct action_record
3596 {
3597 int offset;
3598 int filter;
3599 int next;
3600 };
3601
3602 static int
3603 action_record_eq (const void *pentry, const void *pdata)
3604 {
3605 const struct action_record *entry = (const struct action_record *) pentry;
3606 const struct action_record *data = (const struct action_record *) pdata;
3607 return entry->filter == data->filter && entry->next == data->next;
3608 }
3609
3610 static hashval_t
3611 action_record_hash (const void *pentry)
3612 {
3613 const struct action_record *entry = (const struct action_record *) pentry;
3614 return entry->next * 1009 + entry->filter;
3615 }
3616
3617 static int
3618 add_action_record (htab_t ar_hash, int filter, int next)
3619 {
3620 struct action_record **slot, *new_ar, tmp;
3621
3622 tmp.filter = filter;
3623 tmp.next = next;
3624 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3625
3626 if ((new_ar = *slot) == NULL)
3627 {
3628 new_ar = XNEW (struct action_record);
3629 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3630 new_ar->filter = filter;
3631 new_ar->next = next;
3632 *slot = new_ar;
3633
3634 /* The filter value goes in untouched. The link to the next
3635 record is a "self-relative" byte offset, or zero to indicate
3636 that there is no next record. So convert the absolute 1 based
3637 indices we've been carrying around into a displacement. */
3638
3639 push_sleb128 (&crtl->eh.action_record_data, filter);
3640 if (next)
3641 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3642 push_sleb128 (&crtl->eh.action_record_data, next);
3643 }
3644
3645 return new_ar->offset;
3646 }
3647
3648 static int
3649 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3650 {
3651 struct eh_region *c;
3652 int next;
3653
3654 /* If we've reached the top of the region chain, then we have
3655 no actions, and require no landing pad. */
3656 if (region == NULL)
3657 return -1;
3658
3659 switch (region->type)
3660 {
3661 case ERT_CLEANUP:
3662 /* A cleanup adds a zero filter to the beginning of the chain, but
3663 there are special cases to look out for. If there are *only*
3664 cleanups along a path, then it compresses to a zero action.
3665 Further, if there are multiple cleanups along a path, we only
3666 need to represent one of them, as that is enough to trigger
3667 entry to the landing pad at runtime. */
3668 next = collect_one_action_chain (ar_hash, region->outer);
3669 if (next <= 0)
3670 return 0;
3671 for (c = region->outer; c ; c = c->outer)
3672 if (c->type == ERT_CLEANUP)
3673 return next;
3674 return add_action_record (ar_hash, 0, next);
3675
3676 case ERT_TRY:
3677 /* Process the associated catch regions in reverse order.
3678 If there's a catch-all handler, then we don't need to
3679 search outer regions. Use a magic -3 value to record
3680 that we haven't done the outer search. */
3681 next = -3;
3682 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3683 {
3684 if (c->u.eh_catch.type_list == NULL)
3685 {
3686 /* Retrieve the filter from the head of the filter list
3687 where we have stored it (see assign_filter_values). */
3688 int filter
3689 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3690
3691 next = add_action_record (ar_hash, filter, 0);
3692 }
3693 else
3694 {
3695 /* Once the outer search is done, trigger an action record for
3696 each filter we have. */
3697 tree flt_node;
3698
3699 if (next == -3)
3700 {
3701 next = collect_one_action_chain (ar_hash, region->outer);
3702
3703 /* If there is no next action, terminate the chain. */
3704 if (next == -1)
3705 next = 0;
3706 /* If all outer actions are cleanups or must_not_throw,
3707 we'll have no action record for it, since we had wanted
3708 to encode these states in the call-site record directly.
3709 Add a cleanup action to the chain to catch these. */
3710 else if (next <= 0)
3711 next = add_action_record (ar_hash, 0, 0);
3712 }
3713
3714 flt_node = c->u.eh_catch.filter_list;
3715 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3716 {
3717 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3718 next = add_action_record (ar_hash, filter, next);
3719 }
3720 }
3721 }
3722 return next;
3723
3724 case ERT_ALLOWED_EXCEPTIONS:
3725 /* An exception specification adds its filter to the
3726 beginning of the chain. */
3727 next = collect_one_action_chain (ar_hash, region->outer);
3728
3729 /* If there is no next action, terminate the chain. */
3730 if (next == -1)
3731 next = 0;
3732 /* If all outer actions are cleanups or must_not_throw,
3733 we'll have no action record for it, since we had wanted
3734 to encode these states in the call-site record directly.
3735 Add a cleanup action to the chain to catch these. */
3736 else if (next <= 0)
3737 next = add_action_record (ar_hash, 0, 0);
3738
3739 return add_action_record (ar_hash, region->u.allowed.filter, next);
3740
3741 case ERT_MUST_NOT_THROW:
3742 /* A must-not-throw region with no inner handlers or cleanups
3743 requires no call-site entry. Note that this differs from
3744 the no handler or cleanup case in that we do require an lsda
3745 to be generated. Return a magic -2 value to record this. */
3746 return -2;
3747
3748 case ERT_CATCH:
3749 case ERT_THROW:
3750 /* CATCH regions are handled in TRY above. THROW regions are
3751 for optimization information only and produce no output. */
3752 return collect_one_action_chain (ar_hash, region->outer);
3753
3754 default:
3755 gcc_unreachable ();
3756 }
3757 }
3758
3759 static int
3760 add_call_site (rtx landing_pad, int action)
3761 {
3762 call_site_record record;
3763
3764 record = GGC_NEW (struct call_site_record);
3765 record->landing_pad = landing_pad;
3766 record->action = action;
3767
3768 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3769
3770 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3771 }
3772
3773 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3774 The new note numbers will not refer to region numbers, but
3775 instead to call site entries. */
3776
3777 unsigned int
3778 convert_to_eh_region_ranges (void)
3779 {
3780 rtx insn, iter, note;
3781 htab_t ar_hash;
3782 int last_action = -3;
3783 rtx last_action_insn = NULL_RTX;
3784 rtx last_landing_pad = NULL_RTX;
3785 rtx first_no_action_insn = NULL_RTX;
3786 int call_site = 0;
3787
3788 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3789 return 0;
3790
3791 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3792
3793 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3794
3795 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3796 if (INSN_P (iter))
3797 {
3798 struct eh_region *region;
3799 int this_action;
3800 rtx this_landing_pad;
3801
3802 insn = iter;
3803 if (NONJUMP_INSN_P (insn)
3804 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3805 insn = XVECEXP (PATTERN (insn), 0, 0);
3806
3807 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3808 if (!note)
3809 {
3810 if (! (CALL_P (insn)
3811 || (flag_non_call_exceptions
3812 && may_trap_p (PATTERN (insn)))))
3813 continue;
3814 this_action = -1;
3815 region = NULL;
3816 }
3817 else
3818 {
3819 if (INTVAL (XEXP (note, 0)) <= 0)
3820 continue;
3821 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3822 this_action = collect_one_action_chain (ar_hash, region);
3823 }
3824
3825 /* Existence of catch handlers, or must-not-throw regions
3826 implies that an lsda is needed (even if empty). */
3827 if (this_action != -1)
3828 crtl->uses_eh_lsda = 1;
3829
3830 /* Delay creation of region notes for no-action regions
3831 until we're sure that an lsda will be required. */
3832 else if (last_action == -3)
3833 {
3834 first_no_action_insn = iter;
3835 last_action = -1;
3836 }
3837
3838 /* Cleanups and handlers may share action chains but not
3839 landing pads. Collect the landing pad for this region. */
3840 if (this_action >= 0)
3841 {
3842 struct eh_region *o;
3843 for (o = region; ! o->landing_pad ; o = o->outer)
3844 continue;
3845 this_landing_pad = o->landing_pad;
3846 }
3847 else
3848 this_landing_pad = NULL_RTX;
3849
3850 /* Differing actions or landing pads implies a change in call-site
3851 info, which implies some EH_REGION note should be emitted. */
3852 if (last_action != this_action
3853 || last_landing_pad != this_landing_pad)
3854 {
3855 /* If we'd not seen a previous action (-3) or the previous
3856 action was must-not-throw (-2), then we do not need an
3857 end note. */
3858 if (last_action >= -1)
3859 {
3860 /* If we delayed the creation of the begin, do it now. */
3861 if (first_no_action_insn)
3862 {
3863 call_site = add_call_site (NULL_RTX, 0);
3864 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3865 first_no_action_insn);
3866 NOTE_EH_HANDLER (note) = call_site;
3867 first_no_action_insn = NULL_RTX;
3868 }
3869
3870 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3871 last_action_insn);
3872 NOTE_EH_HANDLER (note) = call_site;
3873 }
3874
3875 /* If the new action is must-not-throw, then no region notes
3876 are created. */
3877 if (this_action >= -1)
3878 {
3879 call_site = add_call_site (this_landing_pad,
3880 this_action < 0 ? 0 : this_action);
3881 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3882 NOTE_EH_HANDLER (note) = call_site;
3883 }
3884
3885 last_action = this_action;
3886 last_landing_pad = this_landing_pad;
3887 }
3888 last_action_insn = iter;
3889 }
3890
3891 if (last_action >= -1 && ! first_no_action_insn)
3892 {
3893 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3894 NOTE_EH_HANDLER (note) = call_site;
3895 }
3896
3897 htab_delete (ar_hash);
3898 return 0;
3899 }
3900
3901 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3902 {
3903 {
3904 RTL_PASS,
3905 "eh_ranges", /* name */
3906 NULL, /* gate */
3907 convert_to_eh_region_ranges, /* execute */
3908 NULL, /* sub */
3909 NULL, /* next */
3910 0, /* static_pass_number */
3911 TV_NONE, /* tv_id */
3912 0, /* properties_required */
3913 0, /* properties_provided */
3914 0, /* properties_destroyed */
3915 0, /* todo_flags_start */
3916 TODO_dump_func, /* todo_flags_finish */
3917 }
3918 };
3919
3920 \f
3921 static void
3922 push_uleb128 (varray_type *data_area, unsigned int value)
3923 {
3924 do
3925 {
3926 unsigned char byte = value & 0x7f;
3927 value >>= 7;
3928 if (value)
3929 byte |= 0x80;
3930 VARRAY_PUSH_UCHAR (*data_area, byte);
3931 }
3932 while (value);
3933 }
3934
3935 static void
3936 push_sleb128 (varray_type *data_area, int value)
3937 {
3938 unsigned char byte;
3939 int more;
3940
3941 do
3942 {
3943 byte = value & 0x7f;
3944 value >>= 7;
3945 more = ! ((value == 0 && (byte & 0x40) == 0)
3946 || (value == -1 && (byte & 0x40) != 0));
3947 if (more)
3948 byte |= 0x80;
3949 VARRAY_PUSH_UCHAR (*data_area, byte);
3950 }
3951 while (more);
3952 }
3953
3954 \f
3955 #ifndef HAVE_AS_LEB128
3956 static int
3957 dw2_size_of_call_site_table (void)
3958 {
3959 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3960 int size = n * (4 + 4 + 4);
3961 int i;
3962
3963 for (i = 0; i < n; ++i)
3964 {
3965 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3966 size += size_of_uleb128 (cs->action);
3967 }
3968
3969 return size;
3970 }
3971
3972 static int
3973 sjlj_size_of_call_site_table (void)
3974 {
3975 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3976 int size = 0;
3977 int i;
3978
3979 for (i = 0; i < n; ++i)
3980 {
3981 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3982 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3983 size += size_of_uleb128 (cs->action);
3984 }
3985
3986 return size;
3987 }
3988 #endif
3989
3990 static void
3991 dw2_output_call_site_table (void)
3992 {
3993 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3994 int i;
3995
3996 for (i = 0; i < n; ++i)
3997 {
3998 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3999 char reg_start_lab[32];
4000 char reg_end_lab[32];
4001 char landing_pad_lab[32];
4002
4003 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4004 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4005
4006 if (cs->landing_pad)
4007 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4008 CODE_LABEL_NUMBER (cs->landing_pad));
4009
4010 /* ??? Perhaps use insn length scaling if the assembler supports
4011 generic arithmetic. */
4012 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4013 data4 if the function is small enough. */
4014 #ifdef HAVE_AS_LEB128
4015 dw2_asm_output_delta_uleb128 (reg_start_lab,
4016 current_function_func_begin_label,
4017 "region %d start", i);
4018 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4019 "length");
4020 if (cs->landing_pad)
4021 dw2_asm_output_delta_uleb128 (landing_pad_lab,
4022 current_function_func_begin_label,
4023 "landing pad");
4024 else
4025 dw2_asm_output_data_uleb128 (0, "landing pad");
4026 #else
4027 dw2_asm_output_delta (4, reg_start_lab,
4028 current_function_func_begin_label,
4029 "region %d start", i);
4030 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4031 if (cs->landing_pad)
4032 dw2_asm_output_delta (4, landing_pad_lab,
4033 current_function_func_begin_label,
4034 "landing pad");
4035 else
4036 dw2_asm_output_data (4, 0, "landing pad");
4037 #endif
4038 dw2_asm_output_data_uleb128 (cs->action, "action");
4039 }
4040
4041 call_site_base += n;
4042 }
4043
4044 static void
4045 sjlj_output_call_site_table (void)
4046 {
4047 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4048 int i;
4049
4050 for (i = 0; i < n; ++i)
4051 {
4052 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
4053
4054 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4055 "region %d landing pad", i);
4056 dw2_asm_output_data_uleb128 (cs->action, "action");
4057 }
4058
4059 call_site_base += n;
4060 }
4061
4062 #ifndef TARGET_UNWIND_INFO
4063 /* Switch to the section that should be used for exception tables. */
4064
4065 static void
4066 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4067 {
4068 section *s;
4069
4070 if (exception_section)
4071 s = exception_section;
4072 else
4073 {
4074 /* Compute the section and cache it into exception_section,
4075 unless it depends on the function name. */
4076 if (targetm.have_named_sections)
4077 {
4078 int flags;
4079
4080 if (EH_TABLES_CAN_BE_READ_ONLY)
4081 {
4082 int tt_format =
4083 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4084 flags = ((! flag_pic
4085 || ((tt_format & 0x70) != DW_EH_PE_absptr
4086 && (tt_format & 0x70) != DW_EH_PE_aligned))
4087 ? 0 : SECTION_WRITE);
4088 }
4089 else
4090 flags = SECTION_WRITE;
4091
4092 #ifdef HAVE_LD_EH_GC_SECTIONS
4093 if (flag_function_sections)
4094 {
4095 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4096 sprintf (section_name, ".gcc_except_table.%s", fnname);
4097 s = get_section (section_name, flags, NULL);
4098 free (section_name);
4099 }
4100 else
4101 #endif
4102 exception_section
4103 = s = get_section (".gcc_except_table", flags, NULL);
4104 }
4105 else
4106 exception_section
4107 = s = flag_pic ? data_section : readonly_data_section;
4108 }
4109
4110 switch_to_section (s);
4111 }
4112 #endif
4113
4114
4115 /* Output a reference from an exception table to the type_info object TYPE.
4116 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4117 the value. */
4118
4119 static void
4120 output_ttype (tree type, int tt_format, int tt_format_size)
4121 {
4122 rtx value;
4123 bool is_public = true;
4124
4125 if (type == NULL_TREE)
4126 value = const0_rtx;
4127 else
4128 {
4129 struct varpool_node *node;
4130
4131 type = lookup_type_for_runtime (type);
4132 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4133
4134 /* Let cgraph know that the rtti decl is used. Not all of the
4135 paths below go through assemble_integer, which would take
4136 care of this for us. */
4137 STRIP_NOPS (type);
4138 if (TREE_CODE (type) == ADDR_EXPR)
4139 {
4140 type = TREE_OPERAND (type, 0);
4141 if (TREE_CODE (type) == VAR_DECL)
4142 {
4143 node = varpool_node (type);
4144 if (node)
4145 varpool_mark_needed_node (node);
4146 is_public = TREE_PUBLIC (type);
4147 }
4148 }
4149 else
4150 gcc_assert (TREE_CODE (type) == INTEGER_CST);
4151 }
4152
4153 /* Allow the target to override the type table entry format. */
4154 if (targetm.asm_out.ttype (value))
4155 return;
4156
4157 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4158 assemble_integer (value, tt_format_size,
4159 tt_format_size * BITS_PER_UNIT, 1);
4160 else
4161 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
4162 }
4163
4164 void
4165 output_function_exception_table (const char * ARG_UNUSED (fnname))
4166 {
4167 int tt_format, cs_format, lp_format, i, n;
4168 #ifdef HAVE_AS_LEB128
4169 char ttype_label[32];
4170 char cs_after_size_label[32];
4171 char cs_end_label[32];
4172 #else
4173 int call_site_len;
4174 #endif
4175 int have_tt_data;
4176 int tt_format_size = 0;
4177
4178 /* Not all functions need anything. */
4179 if (! crtl->uses_eh_lsda)
4180 return;
4181
4182 if (eh_personality_libfunc)
4183 assemble_external_libcall (eh_personality_libfunc);
4184
4185 #ifdef TARGET_UNWIND_INFO
4186 /* TODO: Move this into target file. */
4187 fputs ("\t.personality\t", asm_out_file);
4188 output_addr_const (asm_out_file, eh_personality_libfunc);
4189 fputs ("\n\t.handlerdata\n", asm_out_file);
4190 /* Note that varasm still thinks we're in the function's code section.
4191 The ".endp" directive that will immediately follow will take us back. */
4192 #else
4193 switch_to_exception_section (fnname);
4194 #endif
4195
4196 /* If the target wants a label to begin the table, emit it here. */
4197 targetm.asm_out.except_table_label (asm_out_file);
4198
4199 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4200 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4201
4202 /* Indicate the format of the @TType entries. */
4203 if (! have_tt_data)
4204 tt_format = DW_EH_PE_omit;
4205 else
4206 {
4207 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4208 #ifdef HAVE_AS_LEB128
4209 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
4210 current_function_funcdef_no);
4211 #endif
4212 tt_format_size = size_of_encoded_value (tt_format);
4213
4214 assemble_align (tt_format_size * BITS_PER_UNIT);
4215 }
4216
4217 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
4218 current_function_funcdef_no);
4219
4220 /* The LSDA header. */
4221
4222 /* Indicate the format of the landing pad start pointer. An omitted
4223 field implies @LPStart == @Start. */
4224 /* Currently we always put @LPStart == @Start. This field would
4225 be most useful in moving the landing pads completely out of
4226 line to another section, but it could also be used to minimize
4227 the size of uleb128 landing pad offsets. */
4228 lp_format = DW_EH_PE_omit;
4229 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4230 eh_data_format_name (lp_format));
4231
4232 /* @LPStart pointer would go here. */
4233
4234 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4235 eh_data_format_name (tt_format));
4236
4237 #ifndef HAVE_AS_LEB128
4238 if (USING_SJLJ_EXCEPTIONS)
4239 call_site_len = sjlj_size_of_call_site_table ();
4240 else
4241 call_site_len = dw2_size_of_call_site_table ();
4242 #endif
4243
4244 /* A pc-relative 4-byte displacement to the @TType data. */
4245 if (have_tt_data)
4246 {
4247 #ifdef HAVE_AS_LEB128
4248 char ttype_after_disp_label[32];
4249 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4250 current_function_funcdef_no);
4251 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4252 "@TType base offset");
4253 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4254 #else
4255 /* Ug. Alignment queers things. */
4256 unsigned int before_disp, after_disp, last_disp, disp;
4257
4258 before_disp = 1 + 1;
4259 after_disp = (1 + size_of_uleb128 (call_site_len)
4260 + call_site_len
4261 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4262 + (VEC_length (tree, crtl->eh.ttype_data)
4263 * tt_format_size));
4264
4265 disp = after_disp;
4266 do
4267 {
4268 unsigned int disp_size, pad;
4269
4270 last_disp = disp;
4271 disp_size = size_of_uleb128 (disp);
4272 pad = before_disp + disp_size + after_disp;
4273 if (pad % tt_format_size)
4274 pad = tt_format_size - (pad % tt_format_size);
4275 else
4276 pad = 0;
4277 disp = after_disp + pad;
4278 }
4279 while (disp != last_disp);
4280
4281 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4282 #endif
4283 }
4284
4285 /* Indicate the format of the call-site offsets. */
4286 #ifdef HAVE_AS_LEB128
4287 cs_format = DW_EH_PE_uleb128;
4288 #else
4289 cs_format = DW_EH_PE_udata4;
4290 #endif
4291 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4292 eh_data_format_name (cs_format));
4293
4294 #ifdef HAVE_AS_LEB128
4295 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4296 current_function_funcdef_no);
4297 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4298 current_function_funcdef_no);
4299 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4300 "Call-site table length");
4301 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4302 if (USING_SJLJ_EXCEPTIONS)
4303 sjlj_output_call_site_table ();
4304 else
4305 dw2_output_call_site_table ();
4306 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4307 #else
4308 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
4309 if (USING_SJLJ_EXCEPTIONS)
4310 sjlj_output_call_site_table ();
4311 else
4312 dw2_output_call_site_table ();
4313 #endif
4314
4315 /* ??? Decode and interpret the data for flag_debug_asm. */
4316 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4317 for (i = 0; i < n; ++i)
4318 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4319 (i ? NULL : "Action record table"));
4320
4321 if (have_tt_data)
4322 assemble_align (tt_format_size * BITS_PER_UNIT);
4323
4324 i = VEC_length (tree, crtl->eh.ttype_data);
4325 while (i-- > 0)
4326 {
4327 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4328 output_ttype (type, tt_format, tt_format_size);
4329 }
4330
4331 #ifdef HAVE_AS_LEB128
4332 if (have_tt_data)
4333 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4334 #endif
4335
4336 /* ??? Decode and interpret the data for flag_debug_asm. */
4337 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4338 for (i = 0; i < n; ++i)
4339 {
4340 if (targetm.arm_eabi_unwinder)
4341 {
4342 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4343 output_ttype (type, tt_format, tt_format_size);
4344 }
4345 else
4346 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4347 (i ? NULL : "Exception specification table"));
4348 }
4349
4350 switch_to_section (current_function_section ());
4351 }
4352
4353 void
4354 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4355 {
4356 fun->eh->throw_stmt_table = table;
4357 }
4358
4359 htab_t
4360 get_eh_throw_stmt_table (struct function *fun)
4361 {
4362 return fun->eh->throw_stmt_table;
4363 }
4364
4365 /* Dump EH information to OUT. */
4366
4367 void
4368 dump_eh_tree (FILE * out, struct function *fun)
4369 {
4370 struct eh_region *i;
4371 int depth = 0;
4372 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4373 "allowed_exceptions", "must_not_throw",
4374 "throw"
4375 };
4376
4377 i = fun->eh->region_tree;
4378 if (!i)
4379 return;
4380
4381 fprintf (out, "Eh tree:\n");
4382 while (1)
4383 {
4384 fprintf (out, " %*s %i %s", depth * 2, "",
4385 i->region_number, type_name[(int) i->type]);
4386 if (i->tree_label)
4387 {
4388 fprintf (out, " tree_label:");
4389 print_generic_expr (out, i->tree_label, 0);
4390 }
4391 if (i->label)
4392 fprintf (out, " label:%i", INSN_UID (i->label));
4393 if (i->landing_pad)
4394 {
4395 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
4396 if (GET_CODE (i->landing_pad) == NOTE)
4397 fprintf (out, " (deleted)");
4398 }
4399 if (i->post_landing_pad)
4400 {
4401 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4402 if (GET_CODE (i->post_landing_pad) == NOTE)
4403 fprintf (out, " (deleted)");
4404 }
4405 if (i->resume)
4406 {
4407 fprintf (out, " resume:%i", INSN_UID (i->resume));
4408 if (GET_CODE (i->resume) == NOTE)
4409 fprintf (out, " (deleted)");
4410 }
4411 if (i->may_contain_throw)
4412 fprintf (out, " may_contain_throw");
4413 switch (i->type)
4414 {
4415 case ERT_CLEANUP:
4416 break;
4417
4418 case ERT_TRY:
4419 {
4420 struct eh_region *c;
4421 fprintf (out, " catch regions:");
4422 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4423 fprintf (out, " %i", c->region_number);
4424 }
4425 break;
4426
4427 case ERT_CATCH:
4428 if (i->u.eh_catch.prev_catch)
4429 fprintf (out, " prev: %i",
4430 i->u.eh_catch.prev_catch->region_number);
4431 if (i->u.eh_catch.next_catch)
4432 fprintf (out, " next %i",
4433 i->u.eh_catch.next_catch->region_number);
4434 fprintf (out, " type:");
4435 print_generic_expr (out, i->u.eh_catch.type_list, 0);
4436 break;
4437
4438 case ERT_ALLOWED_EXCEPTIONS:
4439 fprintf (out, " filter :%i types:", i->u.allowed.filter);
4440 print_generic_expr (out, i->u.allowed.type_list, 0);
4441 break;
4442
4443 case ERT_THROW:
4444 fprintf (out, " type:");
4445 print_generic_expr (out, i->u.eh_throw.type, 0);
4446 break;
4447
4448 case ERT_MUST_NOT_THROW:
4449 break;
4450
4451 case ERT_UNKNOWN:
4452 break;
4453 }
4454 if (i->aka)
4455 {
4456 fprintf (out, " also known as:");
4457 dump_bitmap (out, i->aka);
4458 }
4459 else
4460 fprintf (out, "\n");
4461 /* If there are sub-regions, process them. */
4462 if (i->inner)
4463 i = i->inner, depth++;
4464 /* If there are peers, process them. */
4465 else if (i->next_peer)
4466 i = i->next_peer;
4467 /* Otherwise, step back up the tree to the next peer. */
4468 else
4469 {
4470 do
4471 {
4472 i = i->outer;
4473 depth--;
4474 if (i == NULL)
4475 return;
4476 }
4477 while (i->next_peer == NULL);
4478 i = i->next_peer;
4479 }
4480 }
4481 }
4482
4483 /* Dump the EH tree for FN on stderr. */
4484
4485 void
4486 debug_eh_tree (struct function *fn)
4487 {
4488 dump_eh_tree (stderr, fn);
4489 }
4490
4491
4492 /* Verify EH region invariants. */
4493
4494 static bool
4495 verify_eh_region (struct eh_region *region)
4496 {
4497 bool found = false;
4498 if (!region)
4499 return false;
4500 switch (region->type)
4501 {
4502 case ERT_TRY:
4503 {
4504 struct eh_region *c, *prev = NULL;
4505 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4506 {
4507 error ("Try region %i has wrong rh_catch pointer to %i",
4508 region->region_number,
4509 region->u.eh_try.eh_catch->region_number);
4510 found = true;
4511 }
4512 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4513 {
4514 if (c->outer != region->outer)
4515 {
4516 error
4517 ("Catch region %i has different outer region than try region %i",
4518 c->region_number, region->region_number);
4519 found = true;
4520 }
4521 if (c->u.eh_catch.prev_catch != prev)
4522 {
4523 error ("Catch region %i has corrupted catchlist",
4524 c->region_number);
4525 found = true;
4526 }
4527 prev = c;
4528 }
4529 if (prev != region->u.eh_try.last_catch)
4530 {
4531 error
4532 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4533 region->region_number,
4534 region->u.eh_try.last_catch->region_number,
4535 prev->region_number);
4536 found = true;
4537 }
4538 }
4539 break;
4540 case ERT_CATCH:
4541 if (!region->u.eh_catch.prev_catch
4542 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4543 {
4544 error ("Catch region %i should be followed by try", region->region_number);
4545 found = true;
4546 }
4547 break;
4548 case ERT_CLEANUP:
4549 case ERT_ALLOWED_EXCEPTIONS:
4550 case ERT_MUST_NOT_THROW:
4551 case ERT_THROW:
4552 break;
4553 case ERT_UNKNOWN:
4554 gcc_unreachable ();
4555 }
4556 for (region = region->inner; region; region = region->next_peer)
4557 found |= verify_eh_region (region);
4558 return found;
4559 }
4560
4561 /* Verify invariants on EH datastructures. */
4562
4563 void
4564 verify_eh_tree (struct function *fun)
4565 {
4566 struct eh_region *i, *outer = NULL;
4567 bool err = false;
4568 int nvisited = 0;
4569 int count = 0;
4570 int j;
4571 int depth = 0;
4572
4573 if (!fun->eh->region_tree)
4574 return;
4575 for (j = fun->eh->last_region_number; j > 0; --j)
4576 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4577 {
4578 if (i->region_number == j)
4579 count++;
4580 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4581 {
4582 error ("region_array is corrupted for region %i",
4583 i->region_number);
4584 err = true;
4585 }
4586 }
4587 i = fun->eh->region_tree;
4588
4589 while (1)
4590 {
4591 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4592 {
4593 error ("region_array is corrupted for region %i", i->region_number);
4594 err = true;
4595 }
4596 if (i->outer != outer)
4597 {
4598 error ("outer block of region %i is wrong", i->region_number);
4599 err = true;
4600 }
4601 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4602 {
4603 error
4604 ("region %i may contain throw and is contained in region that may not",
4605 i->region_number);
4606 err = true;
4607 }
4608 if (depth < 0)
4609 {
4610 error ("negative nesting depth of region %i", i->region_number);
4611 err = true;
4612 }
4613 nvisited++;
4614 /* If there are sub-regions, process them. */
4615 if (i->inner)
4616 outer = i, i = i->inner, depth++;
4617 /* If there are peers, process them. */
4618 else if (i->next_peer)
4619 i = i->next_peer;
4620 /* Otherwise, step back up the tree to the next peer. */
4621 else
4622 {
4623 do
4624 {
4625 i = i->outer;
4626 depth--;
4627 if (i == NULL)
4628 {
4629 if (depth != -1)
4630 {
4631 error ("tree list ends on depth %i", depth + 1);
4632 err = true;
4633 }
4634 if (count != nvisited)
4635 {
4636 error ("array does not match the region tree");
4637 err = true;
4638 }
4639 if (!err)
4640 for (i = fun->eh->region_tree; i; i = i->next_peer)
4641 err |= verify_eh_region (i);
4642
4643 if (err)
4644 {
4645 dump_eh_tree (stderr, fun);
4646 internal_error ("verify_eh_tree failed");
4647 }
4648 return;
4649 }
4650 outer = i->outer;
4651 }
4652 while (i->next_peer == NULL);
4653 i = i->next_peer;
4654 }
4655 }
4656 }
4657
4658 /* Initialize unwind_resume_libfunc. */
4659
4660 void
4661 default_init_unwind_resume_libfunc (void)
4662 {
4663 /* The default c++ routines aren't actually c++ specific, so use those. */
4664 unwind_resume_libfunc =
4665 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4666 : "_Unwind_Resume");
4667 }
4668
4669 \f
4670 static bool
4671 gate_handle_eh (void)
4672 {
4673 return doing_eh (0);
4674 }
4675
4676 /* Complete generation of exception handling code. */
4677 static unsigned int
4678 rest_of_handle_eh (void)
4679 {
4680 finish_eh_generation ();
4681 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4682 return 0;
4683 }
4684
4685 struct rtl_opt_pass pass_rtl_eh =
4686 {
4687 {
4688 RTL_PASS,
4689 "eh", /* name */
4690 gate_handle_eh, /* gate */
4691 rest_of_handle_eh, /* execute */
4692 NULL, /* sub */
4693 NULL, /* next */
4694 0, /* static_pass_number */
4695 TV_JUMP, /* tv_id */
4696 0, /* properties_required */
4697 0, /* properties_provided */
4698 0, /* properties_destroyed */
4699 0, /* todo_flags_start */
4700 TODO_dump_func /* todo_flags_finish */
4701 }
4702 };
4703
4704 #include "gt-except.h"