/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-flow.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record
{
  rtx landing_pad;
  int action;
};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
                                           struct eh_region *, bool);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
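  /* For orientation, the record built below corresponds field for field
     to roughly the following layout (a sketch only; the authoritative
     definition lives in unwind-sjlj.c, and the jbuf size is
     target-dependent):

        struct SjLj_Function_Context
        {
          struct SjLj_Function_Context *prev;
          int call_site;
          _Unwind_Word data[4];
          void *personality;
          void *lsda;
          void *jbuf[];
        };  */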
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
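
/* To illustrate the linkage built above (an informal sketch, not taken
   from the sources): for a C++ "try { ... } catch (A) { ... } catch (B)
   { ... }", the front end creates one ERT_TRY region T and then calls
   gen_eh_region_catch twice, giving two ERT_CATCH peers Ca and Cb with

     T->u.eh_try.eh_catch   == Ca     Ca->u.eh_catch.next_catch == Cb
     T->u.eh_try.last_catch == Cb     Cb->u.eh_catch.prev_catch == Ca

   i.e. the catches form a doubly-linked list hanging off the TRY, in
   source order.  */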

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
\f
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
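
/* Note the traversal order used above (and repeated by several walkers
   below): visit a region, then its first child (inner), then its next
   sibling (next_peer), climbing back up through outer only when a
   subtree is exhausted.  For a tree with root R, children A and B, and
   A's child A1, the visit order is R, A, A1, B -- a preorder walk.  */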

/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we would eliminate the terminate
   call the runtime would otherwise do.  Return TRUE if R contains
   throwing statements or some of the exceptions in inner regions can
   be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions.  Because the outer loop walking subregions
   does not dive into MUST_NOT_THROW, the outer loop examines every
   region at most once.  The inner loop does unwinding from the throwing
   statement the same way as we do during CFG construction, so it is
   O(n^2) in the size of the EH tree, but O(n) in the size of the CFG.
   In practice EH trees are wide, not deep, so this is not a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether the
             exception gets caught locally sooner.  If it is caught
             locally sooner, we either know region R is not reachable or
             it would have a direct edge from the EH resx and thus would
             have been considered reachable in the first place.  */
          if (found)
            {
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region *r)
{
  struct eh_region **pp;
  struct eh_region *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}

/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region *r1,
                            const struct eh_region *r2)
{
  /* Regions are semantically the same if they are of the same type
     and have the same label.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that the region-type-dependent data are also the same.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}

/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region *r1, struct eh_region *r2)
{
  struct eh_region *next1 = r1->u.eh_try.eh_catch;
  struct eh_region *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);
  if (is_try)
    {
      while (next1)
        {
          r1 = next1;
          r2 = next2;
          gcc_assert (next1->type == ERT_CATCH);
          gcc_assert (next2->type == ERT_CATCH);
          next1 = next1->u.eh_catch.next_catch;
          next2 = next2->u.eh_catch.next_catch;
          remove_eh_handler_and_replace (r2, r1, false);
        }
    }
}

/* Return the hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;
  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}

/* Hash EH regions so that semantically equivalent regions get the same
   hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region *region = (const struct eh_region *)r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                        (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
              (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}

/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region *)r1,
                                     (const struct eh_region *)r2);
}

/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively too.  */

static bool
merge_peers (struct eh_region *region)
{
  struct eh_region *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;
  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic, so we know we
     can merge them.  */
  if (outer)
    for (r1 = region; r1; r1 = next)
      {
        next = r1->next_peer;
        if (r1->type == ERT_CATCH)
          continue;
        if (eh_region_replaceable_by_p (r1->outer, r1))
          {
            replace_region (r1->outer, r1);
            merged = true;
          }
        else
          num_regions++;
      }

  /* Get the new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* If there are only a few regions to inspect, an N^2 loop
     matching each region with each other region
     will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Otherwise use a hash table to avoid N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region *)*slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}

/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function;
   CONTAINS_STMT is a bitmap of all regions that contain a statement
   (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining functions.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   is reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same:
     they trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or in whether they call
     terminate or abort when one is coming from Java and the other from
     C++.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the
     EH tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we
     use that region to merge into, since it will remain in the tree
     anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}

/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) * label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label =
              VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}

/* Return the number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return the next region sharing the same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region *r;
  if (!region)
    return 0;
  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Look for the first outer region of R (or R itself) that is
   a TRY region.  Return NULL if there is none.  */

static struct eh_region *
find_prev_try (struct eh_region * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}
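
/* For example (a sketch): for a cleanup region nested directly inside a
   TRY, find_prev_try returns that TRY; but if a MUST_NOT_THROW region,
   or an ALLOWED_EXCEPTIONS region with an empty type list (a nothrow
   specification), sits in between, nothing can unwind past it, so NULL
   is returned instead.  */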

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the
   current function and root the tree below OUTER_REGION.  Remap labels
   using the MAP callback.  The special case of COPY_REGION of 0 means
   all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new numbers from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;
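  /* As a concrete example (hypothetical numbers): if the copied subtree
     spans region numbers 3..7 (so min_region == 3) and this function
     already had last_region_number == 10, then eh_offset == 10 + 1 - 3
     == 8, and copied region 5 lands at region number 13.  */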

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH regions that were toplevel in the input function
         are now in the outer EH region of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}

/* Return a new copy of EH region OLD inside region NEW_OUTER.
   Don't care about updating the tree otherwise.  */

static struct eh_region *
copy_eh_region_1 (struct eh_region *old, struct eh_region *new_outer)
{
  struct eh_region *new_eh = gen_eh_region (old->type, new_outer);
  new_eh->u = old->u;
  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
  return new_eh;
}

/* Return a new copy of EH region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary.  */

static struct eh_region *
copy_eh_region (struct eh_region *old, struct eh_region *new_outer)
{
  struct eh_region *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      return r;
    }

  /* Locate and copy the corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions.  Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);

  /* To keep the CATCH list in order, we need to copy it in reverse order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions.  Catch: %i to %i\n",
                 r->region_number, n->region_number);
      if (r == old)
        ret = n;
    }
  VEC_free (eh_region, heap, catch_list);
  gcc_assert (ret);
  return ret;
}

/* Callback for foreach_reachable_handler that pushes REGION into the
   single vector DATA.  */

static void
push_reachable_handler (struct eh_region *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}

/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */

struct eh_region *
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region *outer;
  struct eh_region *region;
  VEC (eh_region, heap) * trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy case, just walk the trace and update all occurrences
         of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
      r = region;
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic
         block in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      gcc_assert (start_here >= 0);
      outer = VEC_index (eh_region, trace, start_here)->outer;

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying a CATCH.  copy_eh_region already did the
             job of copying all catch blocks corresponding to the try.
             Now we need to update labels in all of them and skip them in
             the trace.

             We continue nesting into the TRY region corresponding to the
             CATCH: when duplicating an EH tree containing subregions of
             a CATCH, the CATCH region itself is never inserted into the
             trace, so we never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk the other catch regions we copied and update labels
                 as needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
                  i--;
                }
            }

          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
  return r;
}

/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
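
/* Usage sketch (hypothetical region numbers): if regions 4 and 5 are
   both nested inside region 2, eh_region_outermost (ifun, 4, 5) marks
   5's ancestor chain {5, 2, ...} in B_OUTER and returns 2, the first
   region on 4's ancestor chain that also lies on 5's chain (a region
   counts as its own ancestor here, so equal arguments return that
   region).  Chains with no common region yield -1.  */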
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
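
/* So, for example, the first distinct type passed to add_ttypes_entry
   gets filter value 1, the second gets 2, and so on; repeated types hit
   the hash table and get their previously assigned value back.  */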

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
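
/* By contrast with ttypes filters, ehspec filter values are negative:
   the first specification list added gets -1 (the start of the buffer),
   and each list is stored as a 0-terminated run of ttypes filter values
   (encoded as uleb128, or as raw trees for the ARM EABI unwinder).  */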

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
1897
1898 /* Generate the code to actually handle exceptions, which will follow the
1899 landing pads. */
1900
1901 static void
1902 build_post_landing_pads (void)
1903 {
1904 int i;
1905
1906 for (i = cfun->eh->last_region_number; i > 0; --i)
1907 {
1908 struct eh_region *region;
1909 rtx seq;
1910
1911 region = VEC_index (eh_region, cfun->eh->region_array, i);
1912 /* Mind we don't process a region more than once. */
1913 if (!region || region->region_number != i)
1914 continue;
1915
1916 switch (region->type)
1917 {
1918 case ERT_TRY:
1919 /* It is possible that a TRY region is kept alive only because some of
1920 the contained catch regions still have RESX instructions, but they are
1921 reached via their copies. In this case we need to do nothing. */
1922 if (!region->u.eh_try.eh_catch->label)
1923 break;
1924
1925 /* ??? Collect the set of all non-overlapping catch handlers
1926 all the way up the chain until blocked by a cleanup. */
1927 /* ??? Outer try regions can share landing pads with inner
1928 try regions if the types are completely non-overlapping,
1929 and there are no intervening cleanups. */
1930
1931 region->post_landing_pad = gen_label_rtx ();
1932
1933 start_sequence ();
1934
1935 emit_label (region->post_landing_pad);
1936
1937 /* ??? It is mighty inconvenient to call back into the
1938 switch statement generation code in expand_end_case.
1939 Rapid prototyping says: a sequence of ifs. */
1940 {
1941 struct eh_region *c;
1942 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1943 {
1944 if (c->u.eh_catch.type_list == NULL)
1945 emit_jump (c->label);
1946 else
1947 {
1948 /* We need one cmp/jump per type caught. Each type
1949 list entry has a matching entry in the filter list
1950 (see assign_filter_values). */
1951 tree tp_node = c->u.eh_catch.type_list;
1952 tree flt_node = c->u.eh_catch.filter_list;
1953
1954 for (; tp_node; )
1955 {
1956 emit_cmp_and_jump_insns
1957 (crtl->eh.filter,
1958 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1959 EQ, NULL_RTX,
1960 targetm.eh_return_filter_mode (), 0, c->label);
1961
1962 tp_node = TREE_CHAIN (tp_node);
1963 flt_node = TREE_CHAIN (flt_node);
1964 }
1965 }
1966 }
1967 }
1968
1969 /* We delay the generation of the _Unwind_Resume until we generate
1970 landing pads. We emit a marker here so as to get good control
1971 flow data in the meantime. */
1972 region->resume
1973 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1974 emit_barrier ();
1975
1976 seq = get_insns ();
1977 end_sequence ();
1978
1979 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1980
1981 break;
1982
1983 case ERT_ALLOWED_EXCEPTIONS:
1984 if (!region->label)
1985 break;
1986 region->post_landing_pad = gen_label_rtx ();
1987
1988 start_sequence ();
1989
1990 emit_label (region->post_landing_pad);
1991
1992 emit_cmp_and_jump_insns (crtl->eh.filter,
1993 GEN_INT (region->u.allowed.filter),
1994 EQ, NULL_RTX,
1995 targetm.eh_return_filter_mode (), 0, region->label);
1996
1997 /* We delay the generation of the _Unwind_Resume until we generate
1998 landing pads. We emit a marker here so as to get good control
1999 flow data in the meantime. */
2000 region->resume
2001 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2002 emit_barrier ();
2003
2004 seq = get_insns ();
2005 end_sequence ();
2006
2007 emit_to_new_bb_before (seq, region->label);
2008 break;
2009
2010 case ERT_CLEANUP:
2011 case ERT_MUST_NOT_THROW:
2012 region->post_landing_pad = region->label;
2013 break;
2014
2015 case ERT_CATCH:
2016 case ERT_THROW:
2017 /* Nothing to do. */
2018 break;
2019
2020 default:
2021 gcc_unreachable ();
2022 }
2023 }
2024 }
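
/* Illustration of the "sequence of ifs" built above for a
   hypothetical try { ... } catch (A) { ... } catch (...) { ... }:

     post_landing_pad:
       if (crtl->eh.filter == filter-for-A) goto catch-A-label;
       goto catch-all-label;
       RESX    (marker, rewritten later by connect_post_landing_pads)

   The jump to the catch-all handler is unconditional because a NULL
   type_list matches every type, while each listed type costs one
   compare-and-jump against the filter value assigned earlier by
   assign_filter_values. */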
2025
2026 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2027 _Unwind_Resume otherwise. */
2028
2029 static void
2030 connect_post_landing_pads (void)
2031 {
2032 int i;
2033
2034 for (i = cfun->eh->last_region_number; i > 0; --i)
2035 {
2036 struct eh_region *region;
2037 struct eh_region *outer;
2038 rtx seq;
2039 rtx barrier;
2040
2041 region = VEC_index (eh_region, cfun->eh->region_array, i);
2042 /* Mind we don't process a region more than once. */
2043 if (!region || region->region_number != i)
2044 continue;
2045
2046 /* If there is no RESX, or it has been deleted by flow, there's
2047 nothing to fix up. */
2048 if (! region->resume || INSN_DELETED_P (region->resume))
2049 continue;
2050
2051 /* Search for another landing pad in this function. */
2052 for (outer = region->outer; outer ; outer = outer->outer)
2053 if (outer->post_landing_pad)
2054 break;
2055
2056 start_sequence ();
2057
2058 if (outer)
2059 {
2060 edge e;
2061 basic_block src, dest;
2062
2063 emit_jump (outer->post_landing_pad);
2064 src = BLOCK_FOR_INSN (region->resume);
2065 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2066 while (EDGE_COUNT (src->succs) > 0)
2067 remove_edge (EDGE_SUCC (src, 0));
2068 e = make_edge (src, dest, 0);
2069 e->probability = REG_BR_PROB_BASE;
2070 e->count = src->count;
2071 }
2072 else
2073 {
2074 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2075 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2076
2077 /* What we just emitted was a throwing libcall, so it got a
2078 barrier automatically added after it. If the last insn in
2079 the libcall sequence isn't the barrier, it's because the
2080 target emits multiple insns for a call, and there are insns
2081 after the actual call insn (which are redundant and would be
2082 optimized away). The barrier is inserted exactly after the
2083 call insn, so let's go get that and delete the insns after
2084 it, because below we need the barrier to be the last insn in
2085 the sequence. */
2086 delete_insns_since (NEXT_INSN (last_call_insn ()));
2087 }
2088
2089 seq = get_insns ();
2090 end_sequence ();
2091 barrier = emit_insn_before (seq, region->resume);
2092 /* Avoid duplicate barrier. */
2093 gcc_assert (BARRIER_P (barrier));
2094 delete_insn (barrier);
2095 delete_insn (region->resume);
2096
2097 /* ??? From tree-ssa we can wind up with catch regions whose
2098 label is not instantiated, but whose resx is present. Now
2099 that we've dealt with the resx, kill the region. */
2100 if (region->label == NULL && region->type == ERT_CLEANUP)
2101 remove_eh_handler (region);
2102 }
2103 }
2104
2105 \f
2106 static void
2107 dw2_build_landing_pads (void)
2108 {
2109 int i;
2110
2111 for (i = cfun->eh->last_region_number; i > 0; --i)
2112 {
2113 struct eh_region *region;
2114 rtx seq;
2115 basic_block bb;
2116 edge e;
2117
2118 region = VEC_index (eh_region, cfun->eh->region_array, i);
2119 /* Mind we don't process a region more than once. */
2120 if (!region || region->region_number != i)
2121 continue;
2122
2123 if (region->type != ERT_CLEANUP
2124 && region->type != ERT_TRY
2125 && region->type != ERT_ALLOWED_EXCEPTIONS)
2126 continue;
2127
2128 if (!region->post_landing_pad)
2129 continue;
2130
2131 start_sequence ();
2132
2133 region->landing_pad = gen_label_rtx ();
2134 emit_label (region->landing_pad);
2135
2136 #ifdef HAVE_exception_receiver
2137 if (HAVE_exception_receiver)
2138 emit_insn (gen_exception_receiver ());
2139 else
2140 #endif
2141 #ifdef HAVE_nonlocal_goto_receiver
2142 if (HAVE_nonlocal_goto_receiver)
2143 emit_insn (gen_nonlocal_goto_receiver ());
2144 else
2145 #endif
2146 { /* Nothing */ }
2147
2148 emit_move_insn (crtl->eh.exc_ptr,
2149 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2150 emit_move_insn (crtl->eh.filter,
2151 gen_rtx_REG (targetm.eh_return_filter_mode (),
2152 EH_RETURN_DATA_REGNO (1)));
2153
2154 seq = get_insns ();
2155 end_sequence ();
2156
2157 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2158 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2159 e->count = bb->count;
2160 e->probability = REG_BR_PROB_BASE;
2161 }
2162 }
2163
2164 \f
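/* Per-region bookkeeping for building the sjlj dispatch code.
   DIRECTLY_REACHABLE is set by sjlj_find_directly_reachable_regions
   when some insn may transfer control to the region; ACTION_INDEX is
   the action chain from collect_one_action_chain (-1 for no action,
   -2 for must-not-throw); DISPATCH_INDEX numbers the reachable
   regions for the common dispatch code; CALL_SITE_INDEX is the value
   stored at each call site (see sjlj_assign_call_site_values). */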
2165 struct sjlj_lp_info
2166 {
2167 int directly_reachable;
2168 int action_index;
2169 int dispatch_index;
2170 int call_site_index;
2171 };
2172
2173 static bool
2174 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2175 {
2176 rtx insn;
2177 bool found_one = false;
2178
2179 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2180 {
2181 struct eh_region *region;
2182 enum reachable_code rc;
2183 tree type_thrown;
2184 rtx note;
2185
2186 if (! INSN_P (insn))
2187 continue;
2188
2189 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2190 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2191 continue;
2192
2193 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2194 if (!region)
2195 continue;
2196
2197 type_thrown = NULL_TREE;
2198 if (region->type == ERT_THROW)
2199 {
2200 type_thrown = region->u.eh_throw.type;
2201 region = region->outer;
2202 }
2203
2204 /* Find the first containing region that might handle the exception.
2205 Its landing pad is the one to which we will transfer control. */
2206 rc = RNL_NOT_CAUGHT;
2207 for (; region; region = region->outer)
2208 {
2209 rc = reachable_next_level (region, type_thrown, NULL, false);
2210 if (rc != RNL_NOT_CAUGHT)
2211 break;
2212 }
2213 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2214 {
2215 lp_info[region->region_number].directly_reachable = 1;
2216 found_one = true;
2217 }
2218 }
2219
2220 return found_one;
2221 }
2222
2223 static void
2224 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2225 {
2226 htab_t ar_hash;
2227 int i, index;
2228
2229 /* First task: build the action table. */
2230
2231 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2232 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2233
2234 for (i = cfun->eh->last_region_number; i > 0; --i)
2235 if (lp_info[i].directly_reachable)
2236 {
2237 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2238
2239 r->landing_pad = dispatch_label;
2240 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2241 if (lp_info[i].action_index != -1)
2242 crtl->uses_eh_lsda = 1;
2243 }
2244
2245 htab_delete (ar_hash);
2246
2247 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2248 landing pad label for the region. For sjlj though, there is one
2249 common landing pad from which we dispatch to the post-landing pads.
2250
2251 A region receives a dispatch index if it is directly reachable
2252 and requires in-function processing. Regions that share post-landing
2253 pads may share dispatch indices. */
2254 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2255 (see build_post_landing_pads) so we don't bother checking for it. */
2256
2257 index = 0;
2258 for (i = cfun->eh->last_region_number; i > 0; --i)
2259 if (lp_info[i].directly_reachable)
2260 lp_info[i].dispatch_index = index++;
2261
2262 /* Finally: assign call-site values. In dwarf2 terms, this would be
2263 the region number assigned by convert_to_eh_region_ranges, but
2264 handles no-action and must-not-throw differently. */
2265
2266 call_site_base = 1;
2267 for (i = cfun->eh->last_region_number; i > 0; --i)
2268 if (lp_info[i].directly_reachable)
2269 {
2270 int action = lp_info[i].action_index;
2271
2272 /* Map must-not-throw to otherwise unused call-site index 0. */
2273 if (action == -2)
2274 index = 0;
2275 /* Map no-action to otherwise unused call-site index -1. */
2276 else if (action == -1)
2277 index = -1;
2278 /* Otherwise, look it up in the table. */
2279 else
2280 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2281
2282 lp_info[i].call_site_index = index;
2283 }
2284 }
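
/* For example, under the mapping above a reachable region whose
   action chain came back as -2 (must-not-throw) stores call-site
   index 0, one whose chain is -1 (no action) stores -1, and any
   other region stores the index returned by add_call_site, starting
   from call_site_base == 1. */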
2285
2286 static void
2287 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2288 {
2289 int last_call_site = -2;
2290 rtx insn, mem;
2291
2292 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2293 {
2294 struct eh_region *region;
2295 int this_call_site;
2296 rtx note, before, p;
2297
2298 /* Reset value tracking at extended basic block boundaries. */
2299 if (LABEL_P (insn))
2300 last_call_site = -2;
2301
2302 if (! INSN_P (insn))
2303 continue;
2304
2305 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2306
2307 /* Calls that are known to not throw need not be marked. */
2308 if (note && INTVAL (XEXP (note, 0)) <= 0)
2309 continue;
2310
2311 if (note)
2312 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2313 else
2314 region = NULL;
2315
2316 if (!region)
2317 {
2318 /* Calls (and trapping insns) without notes are outside any
2319 exception handling region in this function. Mark them as
2320 no action. */
2321 if (CALL_P (insn)
2322 || (flag_non_call_exceptions
2323 && may_trap_p (PATTERN (insn))))
2324 this_call_site = -1;
2325 else
2326 continue;
2327 }
2328 else
2329 this_call_site = lp_info[region->region_number].call_site_index;
2330
2331 if (this_call_site == last_call_site)
2332 continue;
2333
2334 /* Don't separate a call from its argument loads. */
2335 before = insn;
2336 if (CALL_P (insn))
2337 before = find_first_parameter_load (insn, NULL_RTX);
2338
2339 start_sequence ();
2340 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2341 sjlj_fc_call_site_ofs);
2342 emit_move_insn (mem, GEN_INT (this_call_site));
2343 p = get_insns ();
2344 end_sequence ();
2345
2346 emit_insn_before (p, before);
2347 last_call_site = this_call_site;
2348 }
2349 }
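
/* Illustration: two adjacent can-throw insns that map to the same
   call-site index get a single store of that index, emitted before
   the first of them; an intervening CODE_LABEL resets
   LAST_CALL_SITE, since control may enter the extended basic block
   from elsewhere, so the store is emitted again after the label. */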
2350
2351 /* Construct the SjLj_Function_Context. */
2352
2353 static void
2354 sjlj_emit_function_enter (rtx dispatch_label)
2355 {
2356 rtx fn_begin, fc, mem, seq;
2357 bool fn_begin_outside_block;
2358
2359 fc = crtl->eh.sjlj_fc;
2360
2361 start_sequence ();
2362
2363 /* We're storing this libcall's address into memory instead of
2364 calling it directly. Thus, we must call assemble_external_libcall
2365 here, as we cannot depend on emit_library_call to do it for us. */
2366 assemble_external_libcall (eh_personality_libfunc);
2367 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2368 emit_move_insn (mem, eh_personality_libfunc);
2369
2370 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2371 if (crtl->uses_eh_lsda)
2372 {
2373 char buf[20];
2374 rtx sym;
2375
2376 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2377 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2378 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2379 emit_move_insn (mem, sym);
2380 }
2381 else
2382 emit_move_insn (mem, const0_rtx);
2383
2384 #ifdef DONT_USE_BUILTIN_SETJMP
2385 {
2386 rtx x;
2387 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2388 TYPE_MODE (integer_type_node), 1,
2389 plus_constant (XEXP (fc, 0),
2390 sjlj_fc_jbuf_ofs), Pmode);
2391
2392 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2393 TYPE_MODE (integer_type_node), 0, dispatch_label);
2394 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2395 }
2396 #else
2397 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2398 dispatch_label);
2399 #endif
2400
2401 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2402 1, XEXP (fc, 0), Pmode);
2403
2404 seq = get_insns ();
2405 end_sequence ();
2406
2407 /* ??? Instead of doing this at the beginning of the function,
2408 do this in a block that is at loop level 0 and dominates all
2409 can_throw_internal instructions. */
2410
2411 fn_begin_outside_block = true;
2412 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2413 if (NOTE_P (fn_begin))
2414 {
2415 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2416 break;
2417 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2418 fn_begin_outside_block = false;
2419 }
2420
2421 if (fn_begin_outside_block)
2422 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2423 else
2424 emit_insn_after (seq, fn_begin);
2425 }
2426
2427 /* Call back from expand_function_end to know where we should put
2428 the call to unwind_sjlj_unregister_libfunc if needed. */
2429
2430 void
2431 sjlj_emit_function_exit_after (rtx after)
2432 {
2433 crtl->eh.sjlj_exit_after = after;
2434 }
2435
2436 static void
2437 sjlj_emit_function_exit (void)
2438 {
2439 rtx seq, insn;
2440
2441 start_sequence ();
2442
2443 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2444 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2445
2446 seq = get_insns ();
2447 end_sequence ();
2448
2449 /* ??? Really this can be done in any block at loop level 0 that
2450 post-dominates all can_throw_internal instructions. This is
2451 the last possible moment. */
2452
2453 insn = crtl->eh.sjlj_exit_after;
2454 if (LABEL_P (insn))
2455 insn = NEXT_INSN (insn);
2456
2457 emit_insn_after (seq, insn);
2458 }
2459
2460 static void
2461 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2462 {
2463 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2464 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2465 int i, first_reachable;
2466 rtx mem, dispatch, seq, fc;
2467 rtx before;
2468 basic_block bb;
2469 edge e;
2470
2471 fc = crtl->eh.sjlj_fc;
2472
2473 start_sequence ();
2474
2475 emit_label (dispatch_label);
2476
2477 #ifndef DONT_USE_BUILTIN_SETJMP
2478 expand_builtin_setjmp_receiver (dispatch_label);
2479 #endif
2480
2481 /* Load up dispatch index, exc_ptr and filter values from the
2482 function context. */
2483 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2484 sjlj_fc_call_site_ofs);
2485 dispatch = copy_to_reg (mem);
2486
2487 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2488 if (unwind_word_mode != ptr_mode)
2489 {
2490 #ifdef POINTERS_EXTEND_UNSIGNED
2491 mem = convert_memory_address (ptr_mode, mem);
2492 #else
2493 mem = convert_to_mode (ptr_mode, mem, 0);
2494 #endif
2495 }
2496 emit_move_insn (crtl->eh.exc_ptr, mem);
2497
2498 mem = adjust_address (fc, unwind_word_mode,
2499 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2500 if (unwind_word_mode != filter_mode)
2501 mem = convert_to_mode (filter_mode, mem, 0);
2502 emit_move_insn (crtl->eh.filter, mem);
2503
2504 /* Jump to one of the directly reachable regions. */
2505 /* ??? This really ought to be using a switch statement. */
2506
2507 first_reachable = 0;
2508 for (i = cfun->eh->last_region_number; i > 0; --i)
2509 {
2510 if (! lp_info[i].directly_reachable)
2511 continue;
2512
2513 if (! first_reachable)
2514 {
2515 first_reachable = i;
2516 continue;
2517 }
2518
2519 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2520 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2521 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2522 ->post_landing_pad);
2523 }
2524
2525 seq = get_insns ();
2526 end_sequence ();
2527
2528 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2529 ->post_landing_pad);
2530
2531 bb = emit_to_new_bb_before (seq, before);
2532 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2533 e->count = bb->count;
2534 e->probability = REG_BR_PROB_BASE;
2535 }
2536
2537 static void
2538 sjlj_build_landing_pads (void)
2539 {
2540 struct sjlj_lp_info *lp_info;
2541
2542 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2543
2544 if (sjlj_find_directly_reachable_regions (lp_info))
2545 {
2546 rtx dispatch_label = gen_label_rtx ();
2547 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2548 TYPE_MODE (sjlj_fc_type_node),
2549 TYPE_ALIGN (sjlj_fc_type_node));
2550 crtl->eh.sjlj_fc
2551 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2552 int_size_in_bytes (sjlj_fc_type_node),
2553 align);
2554
2555 sjlj_assign_call_site_values (dispatch_label, lp_info);
2556 sjlj_mark_call_sites (lp_info);
2557
2558 sjlj_emit_function_enter (dispatch_label);
2559 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2560 sjlj_emit_function_exit ();
2561 }
2562
2563 free (lp_info);
2564 }
2565
2566 /* After initial rtl generation, call back to finish generating
2567 exception support code. */
2568
2569 static void
2570 finish_eh_generation (void)
2571 {
2572 basic_block bb;
2573
2574 /* Nothing to do if no regions created. */
2575 if (cfun->eh->region_tree == NULL)
2576 return;
2577
2578 /* The object here is to provide detailed information (via
2579 reachable_handlers) on how exception control flows within the
2580 function for the CFG construction. In this first pass, we can
2581 include type information garnered from ERT_THROW and
2582 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2583 in deleting unreachable handlers. Subsequently, we will generate
2584 landing pads which will connect many of the handlers, and then
2585 type information will not be effective. Still, this is a win
2586 over previous implementations. */
2587
2588 /* These registers are used by the landing pads. Make sure they
2589 have been generated. */
2590 get_exception_pointer ();
2591 get_exception_filter ();
2592
2593 /* Construct the landing pads. */
2594
2595 assign_filter_values ();
2596 build_post_landing_pads ();
2597 connect_post_landing_pads ();
2598 if (USING_SJLJ_EXCEPTIONS)
2599 sjlj_build_landing_pads ();
2600 else
2601 dw2_build_landing_pads ();
2602
2603 crtl->eh.built_landing_pads = 1;
2604
2605 /* We've totally changed the CFG. Start over. */
2606 find_exception_handler_labels ();
2607 break_superblocks ();
2608 if (USING_SJLJ_EXCEPTIONS
2609 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2610 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2611 commit_edge_insertions ();
2612 FOR_EACH_BB (bb)
2613 {
2614 edge e;
2615 edge_iterator ei;
2616 bool eh = false;
2617 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2618 {
2619 if (e->flags & EDGE_EH)
2620 {
2621 remove_edge (e);
2622 eh = true;
2623 }
2624 else
2625 ei_next (&ei);
2626 }
2627 if (eh)
2628 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2629 }
2630 }
2631 \f
2632 /* This section handles removing dead code for flow. */
2633
2634 /* Splice REGION from the region tree and replace it by REPLACE etc.
2635 When UPDATE_CATCH_TRY is true, also update the links from catch
2636 regions to their containing try region. */
2637
2638 static void
2639 remove_eh_handler_and_replace (struct eh_region *region,
2640 struct eh_region *replace,
2641 bool update_catch_try)
2642 {
2643 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2644 rtx lab;
2645
2646 outer = region->outer;
2647
2648 /* For the benefit of efficiently handling REG_EH_REGION notes,
2649 replace this region in the region array with its containing
2650 region. Note that previous region deletions may result in
2651 multiple copies of this region in the array, so we have a
2652 list of alternate numbers by which we are known. */
2653
2654 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2655 replace);
2656 if (region->aka)
2657 {
2658 unsigned i;
2659 bitmap_iterator bi;
2660
2661 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2662 {
2663 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2664 }
2665 }
2666
2667 if (replace)
2668 {
2669 if (!replace->aka)
2670 replace->aka = BITMAP_GGC_ALLOC ();
2671 if (region->aka)
2672 bitmap_ior_into (replace->aka, region->aka);
2673 bitmap_set_bit (replace->aka, region->region_number);
2674 }
2675
2676 if (crtl->eh.built_landing_pads)
2677 lab = region->landing_pad;
2678 else
2679 lab = region->label;
2680 if (outer)
2681 pp_start = &outer->inner;
2682 else
2683 pp_start = &cfun->eh->region_tree;
2684 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2685 continue;
2686 *pp = region->next_peer;
2687
2688 if (replace)
2689 pp_start = &replace->inner;
2690 else
2691 pp_start = &cfun->eh->region_tree;
2692 inner = region->inner;
2693 if (inner)
2694 {
2695 for (p = inner; p->next_peer ; p = p->next_peer)
2696 p->outer = replace;
2697 p->outer = replace;
2698
2699 p->next_peer = *pp_start;
2700 *pp_start = inner;
2701 }
2702
2703 if (region->type == ERT_CATCH
2704 && update_catch_try)
2705 {
2706 struct eh_region *eh_try, *next, *prev;
2707
2708 for (eh_try = region->next_peer;
2709 eh_try->type == ERT_CATCH;
2710 eh_try = eh_try->next_peer)
2711 continue;
2712 gcc_assert (eh_try->type == ERT_TRY);
2713
2714 next = region->u.eh_catch.next_catch;
2715 prev = region->u.eh_catch.prev_catch;
2716
2717 if (next)
2718 next->u.eh_catch.prev_catch = prev;
2719 else
2720 eh_try->u.eh_try.last_catch = prev;
2721 if (prev)
2722 prev->u.eh_catch.next_catch = next;
2723 else
2724 {
2725 eh_try->u.eh_try.eh_catch = next;
2726 if (! next)
2727 remove_eh_handler (eh_try);
2728 }
2729 }
2730 }
2731
2732 /* Splice REGION from the region tree and replace it by the outer region
2733 etc. */
2734
2735 static void
2736 remove_eh_handler (struct eh_region *region)
2737 {
2738 remove_eh_handler_and_replace (region, region->outer, true);
2739 }
2740
2741 /* Remove EH region R that has turned out to have no code in its handler. */
2742
2743 void
2744 remove_eh_region (int r)
2745 {
2746 struct eh_region *region;
2747
2748 region = VEC_index (eh_region, cfun->eh->region_array, r);
2749 remove_eh_handler (region);
2750 }
2751
2752 /* Remove EH region R that has turned out to have no code in its handler
2753 and replace it by the outer region of R2. */
2754
2755 void
2756 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2757 {
2758 struct eh_region *region, *region2;
2759
2760 region = VEC_index (eh_region, cfun->eh->region_array, r);
2761 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2762 remove_eh_handler_and_replace (region, region2->outer, true);
2763 }
2764
2765 /* Invokes CALLBACK for every exception handler label. Only used by old
2766 loop hackery; should not be used by new code. */
2767
2768 void
2769 for_each_eh_label (void (*callback) (rtx))
2770 {
2771 int i;
2772 for (i = 0; i < cfun->eh->last_region_number; i++)
2773 {
2774 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2775 if (r && r->region_number == i && r->label
2776 && GET_CODE (r->label) == CODE_LABEL)
2777 (*callback) (r->label);
2778 }
2779 }
2780
2781 /* Invoke CALLBACK for every exception region in the current function. */
2782
2783 void
2784 for_each_eh_region (void (*callback) (struct eh_region *))
2785 {
2786 int i, n = cfun->eh->last_region_number;
2787 for (i = 1; i <= n; ++i)
2788 {
2789 struct eh_region *region;
2790
2791 region = VEC_index (eh_region, cfun->eh->region_array, i);
2792 if (region)
2793 (*callback) (region);
2794 }
2795 }
2796 \f
2797 /* This section describes CFG exception edges for flow. */
2798
2799 /* For communicating between calls to reachable_next_level. */
2800 struct reachable_info
2801 {
2802 tree types_caught;
2803 tree types_allowed;
2804 void (*callback) (struct eh_region *, void *);
2805 void *callback_data;
2806 };
2807
2808 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2809 base class of TYPE, is in HANDLED. */
2810
2811 static int
2812 check_handled (tree handled, tree type)
2813 {
2814 tree t;
2815
2816 /* We can check for exact matches without front-end help. */
2817 if (! lang_eh_type_covers)
2818 {
2819 for (t = handled; t ; t = TREE_CHAIN (t))
2820 if (TREE_VALUE (t) == type)
2821 return 1;
2822 }
2823 else
2824 {
2825 for (t = handled; t ; t = TREE_CHAIN (t))
2826 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2827 return 1;
2828 }
2829
2830 return 0;
2831 }
2832
2833 /* A subroutine of reachable_next_level. If we are collecting a list
2834 of handlers, add one. After landing pad generation, we reference
2835 the landing pad instead of the handlers themselves. Further, the handlers are
2836 all wired together, so by referencing one, we've got them all.
2837 Before landing pad generation we reference each handler individually.
2838
2839 LP_REGION contains the landing pad; REGION is the handler. */
2840
2841 static void
2842 add_reachable_handler (struct reachable_info *info,
2843 struct eh_region *lp_region, struct eh_region *region)
2844 {
2845 if (! info)
2846 return;
2847
2848 if (crtl->eh.built_landing_pads)
2849 info->callback (lp_region, info->callback_data);
2850 else
2851 info->callback (region, info->callback_data);
2852 }
2853
2854 /* Process one level of exception regions for reachability.
2855 If TYPE_THROWN is non-null, then it is the *exact* type being
2856 propagated. If INFO is non-null, then collect handler labels
2857 and caught/allowed type information between invocations. */
2858
2859 static enum reachable_code
2860 reachable_next_level (struct eh_region *region, tree type_thrown,
2861 struct reachable_info *info,
2862 bool maybe_resx)
2863 {
2864 switch (region->type)
2865 {
2866 case ERT_CLEANUP:
2867 /* Before landing-pad generation, we model control flow
2868 directly to the individual handlers. In this way we can
2869 see that catch handler types may shadow one another. */
2870 add_reachable_handler (info, region, region);
2871 return RNL_MAYBE_CAUGHT;
2872
2873 case ERT_TRY:
2874 {
2875 struct eh_region *c;
2876 enum reachable_code ret = RNL_NOT_CAUGHT;
2877
2878 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2879 {
2880 /* A catch-all handler ends the search. */
2881 if (c->u.eh_catch.type_list == NULL)
2882 {
2883 add_reachable_handler (info, region, c);
2884 return RNL_CAUGHT;
2885 }
2886
2887 if (type_thrown)
2888 {
2889 /* If we have at least one type match, end the search. */
2890 tree tp_node = c->u.eh_catch.type_list;
2891
2892 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2893 {
2894 tree type = TREE_VALUE (tp_node);
2895
2896 if (type == type_thrown
2897 || (lang_eh_type_covers
2898 && (*lang_eh_type_covers) (type, type_thrown)))
2899 {
2900 add_reachable_handler (info, region, c);
2901 return RNL_CAUGHT;
2902 }
2903 }
2904
2905 /* If we have definitive information of a match failure,
2906 the catch won't trigger. */
2907 if (lang_eh_type_covers)
2908 return RNL_NOT_CAUGHT;
2909 }
2910
2911 /* At this point, we either don't know what type is thrown or
2912 don't have front-end assistance to help decide whether it is
2913 covered by one of the types in the list for this region.
2914
2915 We'd then like to add this region to the list of reachable
2916 handlers since it is indeed potentially reachable based on the
2917 information we have.
2918
2919 Actually, this handler is for sure not reachable if all the
2920 types it matches have already been caught. That is, it is only
2921 potentially reachable if at least one of the types it catches
2922 has not been previously caught. */
2923
2924 if (! info)
2925 ret = RNL_MAYBE_CAUGHT;
2926 else
2927 {
2928 tree tp_node = c->u.eh_catch.type_list;
2929 bool maybe_reachable = false;
2930
2931 /* Compute the potential reachability of this handler and
2932 update the list of types caught at the same time. */
2933 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2934 {
2935 tree type = TREE_VALUE (tp_node);
2936
2937 if (! check_handled (info->types_caught, type))
2938 {
2939 info->types_caught
2940 = tree_cons (NULL, type, info->types_caught);
2941
2942 maybe_reachable = true;
2943 }
2944 }
2945
2946 if (maybe_reachable)
2947 {
2948 add_reachable_handler (info, region, c);
2949
2950 /* ??? If the catch type is a base class of every allowed
2951 type, then we know we can stop the search. */
2952 ret = RNL_MAYBE_CAUGHT;
2953 }
2954 }
2955 }
2956
2957 return ret;
2958 }
2959
2960 case ERT_ALLOWED_EXCEPTIONS:
2961 /* An empty list of types definitely ends the search. */
2962 if (region->u.allowed.type_list == NULL_TREE)
2963 {
2964 add_reachable_handler (info, region, region);
2965 return RNL_CAUGHT;
2966 }
2967
2968 /* Collect a list of lists of allowed types for use in detecting
2969 when a catch may be transformed into a catch-all. */
2970 if (info)
2971 info->types_allowed = tree_cons (NULL_TREE,
2972 region->u.allowed.type_list,
2973 info->types_allowed);
2974
2975 /* If we have definitive information about the type hierarchy,
2976 then we can tell if the thrown type will pass through the
2977 filter. */
2978 if (type_thrown && lang_eh_type_covers)
2979 {
2980 if (check_handled (region->u.allowed.type_list, type_thrown))
2981 return RNL_NOT_CAUGHT;
2982 else
2983 {
2984 add_reachable_handler (info, region, region);
2985 return RNL_CAUGHT;
2986 }
2987 }
2988
2989 add_reachable_handler (info, region, region);
2990 return RNL_MAYBE_CAUGHT;
2991
2992 case ERT_CATCH:
2993 /* Catch regions are handled by their controlling try region. */
2994 return RNL_NOT_CAUGHT;
2995
2996 case ERT_MUST_NOT_THROW:
2997 /* Here we end our search, since no exceptions may propagate.
2998
2999 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3000 only via locally handled RESX instructions.
3001
3002 When we inline a function call, we can bring in new handlers. To
3003 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
3004 assume that such handlers exist for any inlinable call until
3005 inlining decisions are fixed. */
3006
3007 if (maybe_resx)
3008 {
3009 add_reachable_handler (info, region, region);
3010 return RNL_CAUGHT;
3011 }
3012 else
3013 return RNL_BLOCKED;
3014
3015 case ERT_THROW:
3016 case ERT_UNKNOWN:
3017 /* Shouldn't see these here. */
3018 gcc_unreachable ();
3019 break;
3020 default:
3021 gcc_unreachable ();
3022 }
3023 }
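
/* Illustration: for a throw of exact type T from inside a cleanup
   nested in a try with a catch (T) handler, the outward walk in
   foreach_reachable_handler below sees RNL_MAYBE_CAUGHT at the
   cleanup, then RNL_CAUGHT at the try (exact type match), and stops
   there; an enclosing must-not-throw region is never consulted. */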
3024
3025 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3026
3027 void
3028 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3029 void (*callback) (struct eh_region *, void *),
3030 void *callback_data)
3031 {
3032 struct reachable_info info;
3033 struct eh_region *region;
3034 tree type_thrown;
3035
3036 memset (&info, 0, sizeof (info));
3037 info.callback = callback;
3038 info.callback_data = callback_data;
3039
3040 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3041 if (!region)
3042 return;
3043
3044 type_thrown = NULL_TREE;
3045 if (is_resx)
3046 {
3047 /* A RESX leaves a region instead of entering it. Thus the
3048 region itself may have been deleted out from under us. */
3049 if (region == NULL)
3050 return;
3051 region = region->outer;
3052 }
3053 else if (region->type == ERT_THROW)
3054 {
3055 type_thrown = region->u.eh_throw.type;
3056 region = region->outer;
3057 }
3058
3059 while (region)
3060 {
3061 if (reachable_next_level (region, type_thrown, &info,
3062 inlinable_call || is_resx) >= RNL_CAUGHT)
3063 break;
3064 /* If we have processed one cleanup, there is no point in
3065 processing any more of them. Each cleanup will have an edge
3066 to the next outer cleanup region, so the flow graph will be
3067 accurate. */
3068 if (region->type == ERT_CLEANUP)
3069 {
3070 enum reachable_code code = RNL_NOT_CAUGHT;
3071 region = find_prev_try (region->outer);
3072 /* Continue looking for an outer TRY region until we find one
3073 that might catch something. */
3074 while (region
3075 && (code = reachable_next_level (region, type_thrown, &info,
3076 inlinable_call || is_resx))
3077 == RNL_NOT_CAUGHT)
3078 region = find_prev_try (region->outer);
3079 if (code >= RNL_CAUGHT)
3080 break;
3081 }
3082 if (region)
3083 region = region->outer;
3084 }
3085 }
3086
3087 /* Retrieve a list of labels of exception handlers which can be
3088 reached by a given insn. */
3089
3090 static void
3091 arh_to_landing_pad (struct eh_region *region, void *data)
3092 {
3093 rtx *p_handlers = (rtx *) data;
3094 if (! *p_handlers)
3095 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3096 }
3097
3098 static void
3099 arh_to_label (struct eh_region *region, void *data)
3100 {
3101 rtx *p_handlers = (rtx *) data;
3102 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3103 }
3104
3105 rtx
3106 reachable_handlers (rtx insn)
3107 {
3108 bool is_resx = false;
3109 rtx handlers = NULL;
3110 int region_number;
3111
3112 if (JUMP_P (insn)
3113 && GET_CODE (PATTERN (insn)) == RESX)
3114 {
3115 region_number = XINT (PATTERN (insn), 0);
3116 is_resx = true;
3117 }
3118 else
3119 {
3120 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3121 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3122 return NULL;
3123 region_number = INTVAL (XEXP (note, 0));
3124 }
3125
3126 foreach_reachable_handler (region_number, is_resx, false,
3127 (crtl->eh.built_landing_pads
3128 ? arh_to_landing_pad
3129 : arh_to_label),
3130 &handlers);
3131
3132 return handlers;
3133 }
3134
3135 /* Determine if the given INSN can throw an exception that is caught
3136 within the function. */
3137
3138 bool
3139 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3140 {
3141 struct eh_region *region;
3142 tree type_thrown;
3143
3144 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3145 if (!region)
3146 return false;
3147
3148 type_thrown = NULL_TREE;
3149 if (is_resx)
3150 region = region->outer;
3151 else if (region->type == ERT_THROW)
3152 {
3153 type_thrown = region->u.eh_throw.type;
3154 region = region->outer;
3155 }
3156
3157 /* If this exception is ignored by each and every containing region,
3158 then control passes straight out. The runtime may handle some
3159 regions, which also do not require processing internally. */
3160 for (; region; region = region->outer)
3161 {
3162 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3163 inlinable_call || is_resx);
3164 if (how == RNL_BLOCKED)
3165 return false;
3166 if (how != RNL_NOT_CAUGHT)
3167 return true;
3168 }
3169
3170 return false;
3171 }
3172
3173 bool
3174 can_throw_internal (const_rtx insn)
3175 {
3176 rtx note;
3177
3178 if (! INSN_P (insn))
3179 return false;
3180
3181 if (JUMP_P (insn)
3182 && GET_CODE (PATTERN (insn)) == RESX
3183 && XINT (PATTERN (insn), 0) > 0)
3184 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3185
3186 if (NONJUMP_INSN_P (insn)
3187 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3188 insn = XVECEXP (PATTERN (insn), 0, 0);
3189
3190 /* Every insn that might throw has an EH_REGION note. */
3191 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3192 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3193 return false;
3194
3195 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3196 }
3197
3198 /* Determine if the given INSN can throw an exception that is
3199 visible outside the function. */
3200
3201 bool
3202 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3203 {
3204 struct eh_region *region;
3205 tree type_thrown;
3206
3207 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3208 if (!region)
3209 return true;
3210
3211 type_thrown = NULL_TREE;
3212 if (is_resx)
3213 region = region->outer;
3214 else if (region->type == ERT_THROW)
3215 {
3216 type_thrown = region->u.eh_throw.type;
3217 region = region->outer;
3218 }
3219
3220 /* If the exception is caught or blocked by any containing region,
3221 then it is not seen by any calling function. */
3222 for (; region ; region = region->outer)
3223 if (reachable_next_level (region, type_thrown, NULL,
3224 inlinable_call || is_resx) >= RNL_CAUGHT)
3225 return false;
3226
3227 return true;
3228 }
3229
3230 bool
3231 can_throw_external (const_rtx insn)
3232 {
3233 rtx note;
3234
3235 if (! INSN_P (insn))
3236 return false;
3237
3238 if (JUMP_P (insn)
3239 && GET_CODE (PATTERN (insn)) == RESX
3240 && XINT (PATTERN (insn), 0) > 0)
3241 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3242
3243 if (NONJUMP_INSN_P (insn)
3244 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3245 {
3246 rtx seq = PATTERN (insn);
3247 int i, n = XVECLEN (seq, 0);
3248
3249 for (i = 0; i < n; i++)
3250 if (can_throw_external (XVECEXP (seq, 0, i)))
3251 return true;
3252
3253 return false;
3254 }
3255
3256 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3257 if (!note)
3258 {
3259 /* Calls (and trapping insns) without notes are outside any
3260 exception handling region in this function. We have to
3261 assume it might throw. Given that the front end and middle
3262 ends mark known NOTHROW functions, this isn't so wildly
3263 inaccurate. */
3264 return (CALL_P (insn)
3265 || (flag_non_call_exceptions
3266 && may_trap_p (PATTERN (insn))));
3267 }
3268 if (INTVAL (XEXP (note, 0)) <= 0)
3269 return false;
3270
3271 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3272 }
3273
3274 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3275
3276 unsigned int
3277 set_nothrow_function_flags (void)
3278 {
3279 rtx insn;
3280
3281 crtl->nothrow = 1;
3282
3283 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3284 something that can throw an exception. We specifically exempt
3285 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3286 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3287 is optimistic. */
3288
3289 crtl->all_throwers_are_sibcalls = 1;
3290
3291 /* If we don't know that this implementation of the function will
3292 actually be used, then we must not set TREE_NOTHROW, since
3293 callers must not assume that this function does not throw. */
3294 if (TREE_NOTHROW (current_function_decl))
3295 return 0;
3296
3297 if (! flag_exceptions)
3298 return 0;
3299
3300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3301 if (can_throw_external (insn))
3302 {
3303 crtl->nothrow = 0;
3304
3305 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3306 {
3307 crtl->all_throwers_are_sibcalls = 0;
3308 return 0;
3309 }
3310 }
3311
3312 for (insn = crtl->epilogue_delay_list; insn;
3313 insn = XEXP (insn, 1))
3314 if (can_throw_external (insn))
3315 {
3316 crtl->nothrow = 0;
3317
3318 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3319 {
3320 crtl->all_throwers_are_sibcalls = 0;
3321 return 0;
3322 }
3323 }
3324 if (crtl->nothrow
3325 && (cgraph_function_body_availability (cgraph_node
3326 (current_function_decl))
3327 >= AVAIL_AVAILABLE))
3328 {
3329 struct cgraph_node *node = cgraph_node (current_function_decl);
3330 struct cgraph_edge *e;
3331 for (e = node->callers; e; e = e->next_caller)
3332 e->can_throw_external = false;
3333 TREE_NOTHROW (current_function_decl) = 1;
3334
3335 if (dump_file)
3336 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3337 current_function_name ());
3338 }
3339 return 0;
3340 }
3341
3342 struct rtl_opt_pass pass_set_nothrow_function_flags =
3343 {
3344 {
3345 RTL_PASS,
3346 "nothrow", /* name */
3347 NULL, /* gate */
3348 set_nothrow_function_flags, /* execute */
3349 NULL, /* sub */
3350 NULL, /* next */
3351 0, /* static_pass_number */
3352 TV_NONE, /* tv_id */
3353 0, /* properties_required */
3354 0, /* properties_provided */
3355 0, /* properties_destroyed */
3356 0, /* todo_flags_start */
3357 TODO_dump_func, /* todo_flags_finish */
3358 }
3359 };
3360
3361 \f
3362 /* Various hooks for the unwind library. */
3363
3364 /* Do any necessary initialization to access arbitrary stack frames.
3365 On the SPARC, this means flushing the register windows. */
3366
3367 void
3368 expand_builtin_unwind_init (void)
3369 {
3370 /* Set this so all the registers get saved in our frame; we need to be
3371 able to copy the saved values for any registers from frames we unwind. */
3372 crtl->saves_all_registers = 1;
3373
3374 #ifdef SETUP_FRAME_ADDRESSES
3375 SETUP_FRAME_ADDRESSES ();
3376 #endif
3377 }
3378
3379 rtx
3380 expand_builtin_eh_return_data_regno (tree exp)
3381 {
3382 tree which = CALL_EXPR_ARG (exp, 0);
3383 unsigned HOST_WIDE_INT iwhich;
3384
3385 if (TREE_CODE (which) != INTEGER_CST)
3386 {
3387 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3388 return constm1_rtx;
3389 }
3390
3391 iwhich = tree_low_cst (which, 1);
3392 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3393 if (iwhich == INVALID_REGNUM)
3394 return constm1_rtx;
3395
3396 #ifdef DWARF_FRAME_REGNUM
3397 iwhich = DWARF_FRAME_REGNUM (iwhich);
3398 #else
3399 iwhich = DBX_REGISTER_NUMBER (iwhich);
3400 #endif
3401
3402 return GEN_INT (iwhich);
3403 }
3404
3405 /* Given a value extracted from the return address register or stack slot,
3406 return the actual address encoded in that value. */
3407
3408 rtx
3409 expand_builtin_extract_return_addr (tree addr_tree)
3410 {
3411 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3412
3413 if (GET_MODE (addr) != Pmode
3414 && GET_MODE (addr) != VOIDmode)
3415 {
3416 #ifdef POINTERS_EXTEND_UNSIGNED
3417 addr = convert_memory_address (Pmode, addr);
3418 #else
3419 addr = convert_to_mode (Pmode, addr, 0);
3420 #endif
3421 }
3422
3423 /* First mask out any unwanted bits. */
3424 #ifdef MASK_RETURN_ADDR
3425 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3426 #endif
3427
3428 /* Then adjust to find the real return address. */
3429 #if defined (RETURN_ADDR_OFFSET)
3430 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3431 #endif
3432
3433 return addr;
3434 }
3435
3436 /* Given an actual address in ADDR_TREE, do any necessary encoding
3437 and return the value to be stored in the return address register or
3438 stack slot so the epilogue will return to that address. */
3439
3440 rtx
3441 expand_builtin_frob_return_addr (tree addr_tree)
3442 {
3443 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3444
3445 addr = convert_memory_address (Pmode, addr);
3446
3447 #ifdef RETURN_ADDR_OFFSET
3448 addr = force_reg (Pmode, addr);
3449 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3450 #endif
3451
3452 return addr;
3453 }
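
/* The two functions above are inverses of each other: on a
   hypothetical target defining RETURN_ADDR_OFFSET as 8 and no
   MASK_RETURN_ADDR, expand_builtin_extract_return_addr computes
   STORED + 8 while expand_builtin_frob_return_addr computes
   ACTUAL - 8, so frobbing an extracted address round-trips. */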
3454
3455 /* Set up the epilogue with the magic bits we'll need to return to the
3456 exception handler. */
3457
3458 void
3459 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3460 tree handler_tree)
3461 {
3462 rtx tmp;
3463
3464 #ifdef EH_RETURN_STACKADJ_RTX
3465 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3466 VOIDmode, EXPAND_NORMAL);
3467 tmp = convert_memory_address (Pmode, tmp);
3468 if (!crtl->eh.ehr_stackadj)
3469 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3470 else if (tmp != crtl->eh.ehr_stackadj)
3471 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3472 #endif
3473
3474 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3475 VOIDmode, EXPAND_NORMAL);
3476 tmp = convert_memory_address (Pmode, tmp);
3477 if (!crtl->eh.ehr_handler)
3478 crtl->eh.ehr_handler = copy_to_reg (tmp);
3479 else if (tmp != crtl->eh.ehr_handler)
3480 emit_move_insn (crtl->eh.ehr_handler, tmp);
3481
3482 if (!crtl->eh.ehr_label)
3483 crtl->eh.ehr_label = gen_label_rtx ();
3484 emit_jump (crtl->eh.ehr_label);
3485 }
3486
3487 void
3488 expand_eh_return (void)
3489 {
3490 rtx around_label;
3491
3492 if (! crtl->eh.ehr_label)
3493 return;
3494
3495 crtl->calls_eh_return = 1;
3496
3497 #ifdef EH_RETURN_STACKADJ_RTX
3498 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3499 #endif
3500
3501 around_label = gen_label_rtx ();
3502 emit_jump (around_label);
3503
3504 emit_label (crtl->eh.ehr_label);
3505 clobber_return_register ();
3506
3507 #ifdef EH_RETURN_STACKADJ_RTX
3508 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3509 #endif
3510
3511 #ifdef HAVE_eh_return
3512 if (HAVE_eh_return)
3513 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3514 else
3515 #endif
3516 {
3517 #ifdef EH_RETURN_HANDLER_RTX
3518 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3519 #else
3520 error ("__builtin_eh_return not supported on this target");
3521 #endif
3522 }
3523
3524 emit_label (around_label);
3525 }
3526
3527 /* Convert a ptr_mode address ADDR_TREE to an unwind_word_mode value,
3528 extending it as controlled by POINTERS_EXTEND_UNSIGNED, and return it. */
3529
3530 rtx
3531 expand_builtin_extend_pointer (tree addr_tree)
3532 {
3533 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3534 int extend;
3535
3536 #ifdef POINTERS_EXTEND_UNSIGNED
3537 extend = POINTERS_EXTEND_UNSIGNED;
3538 #else
3539 /* The previous EH code did an unsigned extend by default, so we do this also
3540 for consistency. */
3541 extend = 1;
3542 #endif
3543
3544 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3545 }
3546 \f
3547 /* In the following functions, we represent entries in the action table
3548 as 1-based indices. Special cases are:
3549
3550 0: null action record, non-null landing pad; implies cleanups
3551 -1: null action record, null landing pad; implies no action
3552 -2: no call-site entry; implies must_not_throw
3553 -3: we have yet to process outer regions
3554
3555 Further, no special cases apply to the "next" field of the record.
3556 For next, 0 means end of list. */
3557
3558 struct action_record
3559 {
3560 int offset;
3561 int filter;
3562 int next;
3563 };
3564
3565 static int
3566 action_record_eq (const void *pentry, const void *pdata)
3567 {
3568 const struct action_record *entry = (const struct action_record *) pentry;
3569 const struct action_record *data = (const struct action_record *) pdata;
3570 return entry->filter == data->filter && entry->next == data->next;
3571 }
3572
3573 static hashval_t
3574 action_record_hash (const void *pentry)
3575 {
3576 const struct action_record *entry = (const struct action_record *) pentry;
3577 return entry->next * 1009 + entry->filter;
3578 }
3579
3580 static int
3581 add_action_record (htab_t ar_hash, int filter, int next)
3582 {
3583 struct action_record **slot, *new_ar, tmp;
3584
3585 tmp.filter = filter;
3586 tmp.next = next;
3587 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3588
3589 if ((new_ar = *slot) == NULL)
3590 {
3591 new_ar = XNEW (struct action_record);
3592 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3593 new_ar->filter = filter;
3594 new_ar->next = next;
3595 *slot = new_ar;
3596
3597 /* The filter value goes in untouched. The link to the next
3598 record is a "self-relative" byte offset, or zero to indicate
3599 that there is no next record. So convert the absolute 1-based
3600 indices we've been carrying around into a displacement. */
3601
3602 push_sleb128 (&crtl->eh.action_record_data, filter);
3603 if (next)
3604 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3605 push_sleb128 (&crtl->eh.action_record_data, next);
3606 }
3607
3608 return new_ar->offset;
3609 }
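
/* A worked trace of the displacement conversion above, starting from
   an empty action_record_data:

     add_action_record (h, 1, 0)  returns offset 1, appends 0x01 0x00
     add_action_record (h, 2, 1)  returns offset 3, appends 0x02 0x7d

   In the second call the "next" field starts at 1-based offset 4, so
   the stored link is 1 - 4 == -3 (sleb128 0x7d): self-relative to
   the start of the "next" field itself. */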
3610
3611 static int
3612 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3613 {
3614 struct eh_region *c;
3615 int next;
3616
3617 /* If we've reached the top of the region chain, then we have
3618 no actions, and require no landing pad. */
3619 if (region == NULL)
3620 return -1;
3621
3622 switch (region->type)
3623 {
3624 case ERT_CLEANUP:
3625 /* A cleanup adds a zero filter to the beginning of the chain, but
3626 there are special cases to look out for. If there are *only*
3627 cleanups along a path, then it compresses to a zero action.
3628 Further, if there are multiple cleanups along a path, we only
3629 need to represent one of them, as that is enough to trigger
3630 entry to the landing pad at runtime. */
3631 next = collect_one_action_chain (ar_hash, region->outer);
3632 if (next <= 0)
3633 return 0;
3634 for (c = region->outer; c ; c = c->outer)
3635 if (c->type == ERT_CLEANUP)
3636 return next;
3637 return add_action_record (ar_hash, 0, next);
3638
3639 case ERT_TRY:
3640 /* Process the associated catch regions in reverse order.
3641 If there's a catch-all handler, then we don't need to
3642 search outer regions. Use a magic -3 value to record
3643 that we haven't done the outer search. */
3644 next = -3;
3645 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3646 {
3647 if (c->u.eh_catch.type_list == NULL)
3648 {
3649 /* Retrieve the filter from the head of the filter list
3650 where we have stored it (see assign_filter_values). */
3651 int filter
3652 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3653
3654 next = add_action_record (ar_hash, filter, 0);
3655 }
3656 else
3657 {
3658 /* Once the outer search is done, trigger an action record for
3659 each filter we have. */
3660 tree flt_node;
3661
3662 if (next == -3)
3663 {
3664 next = collect_one_action_chain (ar_hash, region->outer);
3665
3666 /* If there is no next action, terminate the chain. */
3667 if (next == -1)
3668 next = 0;
3669 /* If all outer actions are cleanups or must_not_throw,
3670 we'll have no action record for it, since we had wanted
3671 to encode these states in the call-site record directly.
3672 Add a cleanup action to the chain to catch these. */
3673 else if (next <= 0)
3674 next = add_action_record (ar_hash, 0, 0);
3675 }
3676
3677 flt_node = c->u.eh_catch.filter_list;
3678 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3679 {
3680 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3681 next = add_action_record (ar_hash, filter, next);
3682 }
3683 }
3684 }
3685 return next;
3686
3687 case ERT_ALLOWED_EXCEPTIONS:
3688 /* An exception specification adds its filter to the
3689 beginning of the chain. */
3690 next = collect_one_action_chain (ar_hash, region->outer);
3691
3692 /* If there is no next action, terminate the chain. */
3693 if (next == -1)
3694 next = 0;
3695 /* If all outer actions are cleanups or must_not_throw,
3696 we'll have no action record for it, since we had wanted
3697 to encode these states in the call-site record directly.
3698 Add a cleanup action to the chain to catch these. */
3699 else if (next <= 0)
3700 next = add_action_record (ar_hash, 0, 0);
3701
3702 return add_action_record (ar_hash, region->u.allowed.filter, next);
3703
3704 case ERT_MUST_NOT_THROW:
3705 /* A must-not-throw region with no inner handlers or cleanups
3706 requires no call-site entry. Note that this differs from
3707 the no handler or cleanup case in that we do require an lsda
3708 to be generated. Return a magic -2 value to record this. */
3709 return -2;
3710
3711 case ERT_CATCH:
3712 case ERT_THROW:
3713 /* CATCH regions are handled in TRY above. THROW regions are
3714 for optimization information only and produce no output. */
3715 return collect_one_action_chain (ar_hash, region->outer);
3716
3717 default:
3718 gcc_unreachable ();
3719 }
3720 }
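
/* Illustration of a resulting chain, assuming a call inside a
   cleanup region nested in a try with a single catch (int) handler
   (filter F) and nothing further out: the ERT_TRY case produces the
   record (F, 0), and the ERT_CLEANUP case prepends a zero filter,
   yielding (0 -> (F, 0)). A path consisting only of cleanups would
   instead compress to the plain zero action, with no record added. */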
3721
3722 static int
3723 add_call_site (rtx landing_pad, int action)
3724 {
3725 call_site_record record;
3726
3727 record = GGC_NEW (struct call_site_record);
3728 record->landing_pad = landing_pad;
3729 record->action = action;
3730
3731 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3732
3733 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3734 }
3735
3736 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3737 The new note numbers will not refer to region numbers, but
3738 instead to call site entries. */
3739
3740 unsigned int
3741 convert_to_eh_region_ranges (void)
3742 {
3743 rtx insn, iter, note;
3744 htab_t ar_hash;
3745 int last_action = -3;
3746 rtx last_action_insn = NULL_RTX;
3747 rtx last_landing_pad = NULL_RTX;
3748 rtx first_no_action_insn = NULL_RTX;
3749 int call_site = 0;
3750
3751 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3752 return 0;
3753
3754 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3755
3756 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3757
3758 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3759 if (INSN_P (iter))
3760 {
3761 struct eh_region *region;
3762 int this_action;
3763 rtx this_landing_pad;
3764
3765 insn = iter;
3766 if (NONJUMP_INSN_P (insn)
3767 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3768 insn = XVECEXP (PATTERN (insn), 0, 0);
3769
3770 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3771 if (!note)
3772 {
3773 if (! (CALL_P (insn)
3774 || (flag_non_call_exceptions
3775 && may_trap_p (PATTERN (insn)))))
3776 continue;
3777 this_action = -1;
3778 region = NULL;
3779 }
3780 else
3781 {
3782 if (INTVAL (XEXP (note, 0)) <= 0)
3783 continue;
3784 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3785 this_action = collect_one_action_chain (ar_hash, region);
3786 }
3787
3788 /* The existence of catch handlers or must-not-throw regions
3789 implies that an lsda is needed (even if empty). */
3790 if (this_action != -1)
3791 crtl->uses_eh_lsda = 1;
3792
3793 /* Delay creation of region notes for no-action regions
3794 until we're sure that an lsda will be required. */
3795 else if (last_action == -3)
3796 {
3797 first_no_action_insn = iter;
3798 last_action = -1;
3799 }
3800
3801 /* Cleanups and handlers may share action chains but not
3802 landing pads. Collect the landing pad for this region. */
3803 if (this_action >= 0)
3804 {
3805 struct eh_region *o;
3806 for (o = region; ! o->landing_pad ; o = o->outer)
3807 continue;
3808 this_landing_pad = o->landing_pad;
3809 }
3810 else
3811 this_landing_pad = NULL_RTX;
3812
3813 /* Differing actions or landing pads implies a change in call-site
3814 info, which implies some EH_REGION note should be emitted. */
3815 if (last_action != this_action
3816 || last_landing_pad != this_landing_pad)
3817 {
3818 /* If we'd not seen a previous action (-3) or the previous
3819 action was must-not-throw (-2), then we do not need an
3820 end note. */
3821 if (last_action >= -1)
3822 {
3823 /* If we delayed the creation of the begin, do it now. */
3824 if (first_no_action_insn)
3825 {
3826 call_site = add_call_site (NULL_RTX, 0);
3827 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3828 first_no_action_insn);
3829 NOTE_EH_HANDLER (note) = call_site;
3830 first_no_action_insn = NULL_RTX;
3831 }
3832
3833 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3834 last_action_insn);
3835 NOTE_EH_HANDLER (note) = call_site;
3836 }
3837
3838 /* If the new action is must-not-throw, then no region notes
3839 are created. */
3840 if (this_action >= -1)
3841 {
3842 call_site = add_call_site (this_landing_pad,
3843 this_action < 0 ? 0 : this_action);
3844 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3845 NOTE_EH_HANDLER (note) = call_site;
3846 }
3847
3848 last_action = this_action;
3849 last_landing_pad = this_landing_pad;
3850 }
3851 last_action_insn = iter;
3852 }
3853
3854 if (last_action >= -1 && ! first_no_action_insn)
3855 {
3856 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3857 NOTE_EH_HANDLER (note) = call_site;
3858 }
3859
3860 htab_delete (ar_hash);
3861 return 0;
3862 }
3863
3864 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3865 {
3866 {
3867 RTL_PASS,
3868 "eh_ranges", /* name */
3869 NULL, /* gate */
3870 convert_to_eh_region_ranges, /* execute */
3871 NULL, /* sub */
3872 NULL, /* next */
3873 0, /* static_pass_number */
3874 TV_NONE, /* tv_id */
3875 0, /* properties_required */
3876 0, /* properties_provided */
3877 0, /* properties_destroyed */
3878 0, /* todo_flags_start */
3879 TODO_dump_func, /* todo_flags_finish */
3880 }
3881 };
3882
3883 \f
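/* Append VALUE to *DATA_AREA as an unsigned LEB128 datum: seven value
   bits per byte, least significant group first, with the high bit set
   on every byte except the last.  As a worked example (illustration
   only), 624485 == 0x98765 splits into the 7-bit groups 0x26 0x0e 0x65
   and is pushed low group first as the bytes 0xe5 0x8e 0x26.  */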
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

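/* Append VALUE to *DATA_AREA as a signed LEB128 datum.  Emission stops
   once the remaining value is all sign bits and the sign bit (0x40) of
   the last byte pushed agrees with it; as an illustration, -624485
   encodes as the three bytes 0x9b 0xf1 0x59.  */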
static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}

\f
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs
	= VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs
	= VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

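/* Emit the call-site table in the format expected by the DWARF2
   unwinder: per record, the region start, the region length and the
   landing pad, each relative to the start of the function, followed
   by the action.  For illustration only (assuming HAVE_AS_LEB128 and
   GNU as label spellings such as .LFB0 for the function begin label),
   one record might assemble as

	.uleb128 .LEHB0-.LFB0		region 0 start
	.uleb128 .LEHE0-.LEHB0		length
	.uleb128 .L5-.LFB0		landing pad
	.uleb128 0x1			action

   where a zero landing pad field means the region has no handler.  */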
static void
dw2_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs
	= VEC_index (call_site_record, crtl->eh.call_site_record, i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs
	= VEC_index (call_site_record, crtl->eh.call_site_record, i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
#endif


/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}

void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (eh_personality_libfunc)
    assemble_external_libcall (eh_personality_libfunc);

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  switch_to_exception_section (fnname);
#endif

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.except_table_label (asm_out_file);

  have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */
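  /* The layout emitted below, in the order the fields appear:

	byte	 @LPStart format (always DW_EH_PE_omit here)
	byte	 @TType format
	uleb128	 @TType base offset (only when @TType data is present)
	byte	 call-site table format
	uleb128	 call-site table length
	...	 call-site table
	...	 action record table
	...	 @TType table, aligned, emitted in reverse order
	...	 exception specification table  */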
  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* The displacement to the @TType base: a uleb128 offset measured
     from just past the offset field itself.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
		    + (VEC_length (tree, crtl->eh.ttype_data)
		       * tt_format_size));

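      /* The size of the uleb128 encoding of DISP feeds back into the
	 padding needed to align the @TType table, which in turn feeds
	 back into DISP itself.  Iterate until the value settles.  */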
      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VEC_length (tree, crtl->eh.ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, crtl->eh.ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}

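/* Record TABLE as FUN's mapping from throwing statements to EH region
   numbers.  */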
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

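/* Return FUN's mapping from throwing statements to EH region numbers.  */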
htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char *const type_name[] = { "unknown", "cleanup", "try",
					   "catch", "allowed_exceptions",
					   "must_not_throw", "throw" };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
	       i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
	{
	  fprintf (out, " tree_label:");
	  print_generic_expr (out, i->tree_label, 0);
	}
      if (i->label)
	fprintf (out, " label:%i", INSN_UID (i->label));
      if (i->landing_pad)
	{
	  fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
	  if (GET_CODE (i->landing_pad) == NOTE)
	    fprintf (out, " (deleted)");
	}
      if (i->post_landing_pad)
	{
	  fprintf (out, " post_landing_pad:%i",
		   INSN_UID (i->post_landing_pad));
	  if (GET_CODE (i->post_landing_pad) == NOTE)
	    fprintf (out, " (deleted)");
	}
      if (i->resume)
	{
	  fprintf (out, " resume:%i", INSN_UID (i->resume));
	  if (GET_CODE (i->resume) == NOTE)
	    fprintf (out, " (deleted)");
	}
      if (i->may_contain_throw)
	fprintf (out, " may_contain_throw");
      switch (i->type)
	{
	case ERT_CLEANUP:
	  break;

	case ERT_TRY:
	  {
	    struct eh_region *c;
	    fprintf (out, " catch regions:");
	    for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	      fprintf (out, " %i", c->region_number);
	  }
	  break;

	case ERT_CATCH:
	  if (i->u.eh_catch.prev_catch)
	    fprintf (out, " prev: %i",
		     i->u.eh_catch.prev_catch->region_number);
	  if (i->u.eh_catch.next_catch)
	    fprintf (out, " next: %i",
		     i->u.eh_catch.next_catch->region_number);
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_catch.type_list, 0);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter: %i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;

	case ERT_THROW:
	  fprintf (out, " type:");
	  print_generic_expr (out, i->u.eh_throw.type, 0);
	  break;

	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_UNKNOWN:
	  break;
	}
      if (i->aka)
	{
	  fprintf (out, " also known as:");
	  dump_bitmap (out, i->aka);
	}
      else
	fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Dump the EH tree for FN on stderr.  */

void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}


/* Verify EH region invariants.  */

static bool
verify_eh_region (struct eh_region *region)
{
  bool found = false;
  if (!region)
    return false;
  switch (region->type)
    {
    case ERT_TRY:
      {
	struct eh_region *c, *prev = NULL;
	if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
	  {
	    error ("Try region %i has wrong eh_catch pointer to %i",
		   region->region_number,
		   region->u.eh_try.eh_catch->region_number);
	    found = true;
	  }
	for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
	  {
	    if (c->outer != region->outer)
	      {
		error
		  ("Catch region %i has different outer region than try region %i",
		   c->region_number, region->region_number);
		found = true;
	      }
	    if (c->u.eh_catch.prev_catch != prev)
	      {
		error ("Catch region %i has corrupted catchlist",
		       c->region_number);
		found = true;
	      }
	    prev = c;
	  }
	if (prev != region->u.eh_try.last_catch)
	  {
	    error
	      ("Try region %i has wrong last_catch pointer to %i instead of %i",
	       region->region_number,
	       region->u.eh_try.last_catch->region_number,
	       prev->region_number);
	    found = true;
	  }
      }
      break;
    case ERT_CATCH:
      if (!region->u.eh_catch.prev_catch
	  && (!region->next_peer || region->next_peer->type != ERT_TRY))
	{
	  error ("Catch region %i should be followed by try",
		 region->region_number);
	  found = true;
	}
      break;
    case ERT_CLEANUP:
    case ERT_ALLOWED_EXCEPTIONS:
    case ERT_MUST_NOT_THROW:
    case ERT_THROW:
      break;
    case ERT_UNKNOWN:
      gcc_unreachable ();
    }
  for (region = region->inner; region; region = region->next_peer)
    found |= verify_eh_region (region);
  return found;
}

/* Verify invariants on EH data structures.  */

void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
	if (i->region_number == j)
	  count++;
	if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
	  {
	    error ("region_array is corrupted for region %i",
		   i->region_number);
	    err = true;
	  }
      }
  i = fun->eh->region_tree;

  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
	{
	  error ("region_array is corrupted for region %i", i->region_number);
	  err = true;
	}
      if (i->outer != outer)
	{
	  error ("outer block of region %i is wrong", i->region_number);
	  err = true;
	}
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
	{
	  error
	    ("region %i may contain a throw but its outer region may not",
	     i->region_number);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", i->region_number);
	  err = true;
	}
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
	outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		{
		  if (depth != -1)
		    {
		      error ("tree list ends on depth %i", depth + 1);
		      err = true;
		    }
		  if (count != nvisited)
		    {
		      error ("array does not match the region tree");
		      err = true;
		    }
		  if (!err)
		    for (i = fun->eh->region_tree; i; i = i->next_peer)
		      err |= verify_eh_region (i);

		  if (err)
		    {
		      dump_eh_tree (stderr, fun);
		      internal_error ("verify_eh_tree failed");
		    }
		  return;
		}
	      outer = i->outer;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
		      : "_Unwind_Resume");
}

\f
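/* Gate: run the pass only if exception handling code is being
   generated at all.  */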
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-except.h"