/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-flow.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record
{
  rtx landing_pad;
  int action;
};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
                                           struct eh_region *, bool);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
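
/* For reference, a sketch of the runtime structure that the record
   type built above is intended to mirror.  This is an illustrative
   approximation of struct SjLj_Function_Context from unwind-sjlj.c,
   not a verbatim copy; consult that file for the authoritative
   definition:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;  // chain of registered contexts
       int call_site;                       // index into the call-site table
       _Unwind_Word data[4];                // values from the personality fn
       _Unwind_Personality_Fn personality;
       void *lsda;
       void *jbuf[];                        // setjmp/longjmp buffer
     };

   The field offsets cached in sjlj_fc_*_ofs above let the RTL
   expanders address these fields directly.  */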

void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
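
/* For illustration (a hypothetical fragment, not from any testcase),
   C++ source such as

     try {
       try { ... } catch (A) { ... }
     } catch (B) { ... }

   yields an ERT_TRY region with an ERT_CATCH peer for B (catches are
   created as peers of their TRY, under the same outer region), and
   nested inside the outer TRY an inner ERT_TRY with an ERT_CATCH peer
   for A.  Since gen_eh_region links each new region in as the first
   child of OUTER (or as a new root), peers end up chained in reverse
   order of creation via next_peer.  */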

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
\f
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}


/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
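
/* A note on the traversal above: this is the canonical non-recursive
   walk over the EH region tree used in this file (can_be_reached_by_runtime
   below has the same shape): visit a region, descend into INNER if
   present, otherwise advance to NEXT_PEER, otherwise climb OUTER links
   until a peer is found or the root is passed.  A recursive equivalent,
   shown purely for illustration with a hypothetical `visit' callback,
   would be:

     static void
     walk_eh_tree (struct eh_region *r)
     {
       for (; r; r = r->next_peer)
         {
           visit (r);
           walk_eh_tree (r->inner);
         }
     }

   The iterative form avoids recursion depth proportional to the
   nesting of the region tree.  */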

/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or we would eliminate the terminate
   call the runtime would otherwise perform.  Return TRUE if R contains
   throwing statements or some of the exceptions in inner regions can
   be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at
   most once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions, and because the outer loop walking
   subregions does not dive into MUST_NOT_THROW, the outer loop
   examines every region at most once.  The inner loop does the
   unwinding from the throwing statement the same way as we do during
   CFG construction, so it is O(n^2) in the size of the EH tree, but
   O(n) in the size of the CFG.  In practice EH trees are wide, not
   deep, so this is not a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether we
             get locally caught sooner.  If we get locally caught
             sooner, we either know region R is not reachable, or it
             would have a direct edge from the EH resx and thus would
             have been considered reachable in the first place.  */
          if (found)
            {
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region *r)
{
  struct eh_region **pp;
  struct eh_region *outer = r->outer;
  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}

/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region *r1,
                            const struct eh_region *r2)
{
  /* Regions are semantically the same if they are of the same type and
     have the same label.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that the region-type-dependent data are the same as well.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}

/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region *r1, struct eh_region *r2)
{
  struct eh_region *next1 = r1->u.eh_try.eh_catch;
  struct eh_region *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);
  if (is_try)
    {
      while (next1)
        {
          r1 = next1;
          r2 = next2;
          gcc_assert (next1->type == ERT_CATCH);
          gcc_assert (next2->type == ERT_CATCH);
          next1 = next1->u.eh_catch.next_catch;
          next2 = next2->u.eh_catch.next_catch;
          remove_eh_handler_and_replace (r2, r1, false);
        }
    }
}

/* Return the hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;
  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}

/* Hash EH regions so that semantically equivalent regions get the same
   hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region *region = (const struct eh_region *) r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                        (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
              (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type),
                                       val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}

/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region *) r1,
                                     (const struct eh_region *) r2);
}

/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively too.  */

static bool
merge_peers (struct eh_region *region)
{
  struct eh_region *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;
  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic, so we know we can merge
     them.  */
  if (outer)
    for (r1 = region; r1; r1 = next)
      {
        next = r1->next_peer;
        if (r1->type == ERT_CATCH)
          continue;
        if (eh_region_replaceable_by_p (r1->outer, r1))
          {
            replace_region (r1->outer, r1);
            merged = true;
          }
        else
          num_regions++;
      }

  /* Get the new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* There are few regions to inspect:
     an N^2 loop matching each region against each other
     will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Or use a hashtable to avoid the N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region *) *slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}

/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a stmt (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining functions.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well, and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* A TRY region is reachable if any of its CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same;
     they trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or in whether they call
     terminate or abort when one comes from Java and another from C++.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the
     EH tree (since unwinding never continues to the outer region
     anyway).  If a MUST_NOT_THROW with a local handler is present in
     the tree, we use that region to merge into, since it will remain
     in the tree anyway; otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual
     code and ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}

/* Return array mapping LABEL_DECL_UID to region such that region's
   tree_label is identical to label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) *label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label
              = VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}
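
/* A small worked example (purely illustrative): if regions 3 and 7
   share the same label L, the loop above visits region 7 first (it
   walks region numbers downward), storing 7 in the map; when region 3
   is visited, it finds 7 already recorded, so region 3's
   next_region_sharing_label points at region 7 and the map entry for L
   becomes 3.  Callers thus reach all regions sharing a label by
   starting from the map and following next_region_sharing_label.  */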

/* Return number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return next region sharing same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region *r;
  if (!region)
    return 0;
  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need
     to do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Return the prev_try pointer that catch subregions of R should
   point to.  */

static struct eh_region *
find_prev_try (struct eh_region *r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the
   current function and root the tree below OUTER_REGION.  Remap labels
   using the MAP callback.  The special case of COPY_REGION of 0 means
   all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, old_prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh)
    return 0;
#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      old_prev_try = find_prev_try (cur);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
      old_prev_try = NULL;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;
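
  /* For illustration (numbers invented): if the current function's
     last region number is 10 and the copied subtree uses numbers 3..6
     in IFUN, then eh_offset is 10 + 1 - 3 = 8, so old region 3 becomes
     11, old region 6 becomes 14, and last_region_number grows by
     num_regions = 4.  A single offset is what lets REMAP below find
     each replacement region with plain arithmetic.  */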

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array,
                                         outer_region));

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH regions that were toplevel in the input function
         are now covered by the outer EH region of the output
         function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        case ERT_CLEANUP:
          if (cur->u.cleanup.prev_try != old_prev_try)
            REMAP (cur->u.cleanup.prev_try);
          else
            cur->u.cleanup.prev_try = prev_try;
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}

/* Return new copy of eh region OLD inside region NEW_OUTER.
   Do not care about updating the tree otherwise.  */

static struct eh_region *
copy_eh_region_1 (struct eh_region *old, struct eh_region *new_outer)
{
  struct eh_region *new_eh = gen_eh_region (old->type, new_outer);
  new_eh->u = old->u;
  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number,
               new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number,
             new_eh->region_number);
  return new_eh;
}

/* Return new copy of eh region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary, and update the cleanup
   region prev_try pointers.

   PREV_TRY_MAP points to the outer TRY region if it was already copied
   in the trace.  */

static struct eh_region *
copy_eh_region (struct eh_region *old, struct eh_region *new_outer,
                struct eh_region *prev_try_map)
{
  struct eh_region *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      if (r->type == ERT_CLEANUP && prev_try_map)
        {
          gcc_assert (r->u.cleanup.prev_try);
          r->u.cleanup.prev_try = prev_try_map;
        }
      return r;
    }

  /* Locate and copy the corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH;
       old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number,
               new_try);

  /* To keep the CATCH list in order, we need to copy it in reverse
     order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH;
       r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
                 r->region_number, n->region_number);
      if (r == old)
        ret = n;
    }
  VEC_free (eh_region, heap, catch_list);
  gcc_assert (ret);
  return ret;
}

/* Callback for foreach_reachable_handler that pushes REGION into the
   vector DATA.  */

static void
push_reachable_handler (struct eh_region *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}

/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */

struct eh_region *
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region *outer, *prev_try_map = NULL;
  struct eh_region *region;
  VEC (eh_region, heap) *trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i",
                 VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy case, just walk the trace and update all
         occurrences of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
      r = region;
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic
         block in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      gcc_assert (start_here >= 0);
      outer = VEC_index (eh_region, trace, start_here)->outer;

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer, prev_try_map);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying a CATCH.  copy_eh_region already did
             the job of copying all the catch blocks corresponding to
             the try.  Now we need to update labels in all of them and
             adjust the trace accordingly.

             We continue nesting into the TRY region corresponding to
             the CATCH: when duplicating an EH tree containing
             subregions of the CATCH, the CATCH region itself is never
             inserted into the trace, so we never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk the other catch regions we copied and update
                 labels as needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer
                     == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type
                              == ERT_CATCH);
                  i--;
                }
            }

          /* Cleanup regions point to the outer TRY blocks.  */
          if (r->type == ERT_TRY)
            prev_try_map = r;
          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer, prev_try_map);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
  return r;
}

/* Return the region number of the region that is outer to both
   REGION_A and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash,
                                              TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
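
/* A worked example (illustrative, not from any particular testcase):
   for a C++ declaration `void f () throw (A, B)' with an empty ttype
   table, add_ttypes_entry assigns filter 1 to A and filter 2 to B, so
   ttype_data becomes {A, B}.  add_ehspec_entry then appends the
   uleb128-encoded bytes {1, 2, 0} to ehspec_data (the 0 terminates the
   list) and returns -1, the negative byte index at which this
   specification starts.  A second function with the same `throw (A, B)'
   specification would hit the hash table and reuse filter -1 without
   growing either table.  */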

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node,
                                 r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will
                 need an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1897
1898 /* Emit SEQ into a new basic block just before INSN (which is assumed
1899 to be the first instruction of some existing BB) and return the
1900 newly produced block. */
1901 static basic_block
1902 emit_to_new_bb_before (rtx seq, rtx insn)
1903 {
1904 rtx last;
1905 basic_block bb;
1906 edge e;
1907 edge_iterator ei;
1908
1909 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1910 call), we don't want it to go into newly created landing pad or other EH
1911 construct. */
1912 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1913 if (e->flags & EDGE_FALLTHRU)
1914 force_nonfallthru (e);
1915 else
1916 ei_next (&ei);
1917 last = emit_insn_before (seq, insn);
1918 if (BARRIER_P (last))
1919 last = PREV_INSN (last);
1920 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1921 update_bb_for_insn (bb);
1922 bb->flags |= BB_SUPERBLOCK;
1923 return bb;
1924 }
1925
1926 /* Generate the code to actually handle exceptions, which will follow the
1927 landing pads. */
1928
1929 static void
1930 build_post_landing_pads (void)
1931 {
1932 int i;
1933
1934 for (i = cfun->eh->last_region_number; i > 0; --i)
1935 {
1936 struct eh_region *region;
1937 rtx seq;
1938
1939 region = VEC_index (eh_region, cfun->eh->region_array, i);
1940 /* Mind we don't process a region more than once. */
1941 if (!region || region->region_number != i)
1942 continue;
1943
1944 switch (region->type)
1945 {
1946 case ERT_TRY:
1947 /* It is possible that the TRY region is kept alive only because some
1948 of its contained catch regions still have a RESX instruction but
1949 are reached via their copies. In this case there is nothing to do. */
1950 if (!region->u.eh_try.eh_catch->label)
1951 break;
1952
1953 /* ??? Collect the set of all non-overlapping catch handlers
1954 all the way up the chain until blocked by a cleanup. */
1955 /* ??? Outer try regions can share landing pads with inner
1956 try regions if the types are completely non-overlapping,
1957 and there are no intervening cleanups. */
1958
1959 region->post_landing_pad = gen_label_rtx ();
1960
1961 start_sequence ();
1962
1963 emit_label (region->post_landing_pad);
1964
1965 /* ??? It is mighty inconvenient to call back into the
1966 switch statement generation code in expand_end_case.
1967 Rapid prototyping sez a sequence of ifs. */
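	  /* Roughly, for a try with handlers catch (A), catch (B) and
	     catch (...), assuming A and B were assigned filter values
	     1 and 2, the sequence built below is:

		 post_landing_pad:
		   if (crtl->eh.filter == 1) goto label_A;
		   if (crtl->eh.filter == 2) goto label_B;
		   goto label_catch_all;
		   (RESX marker for the not-caught case)  */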
1968 {
1969 struct eh_region *c;
1970 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1971 {
1972 if (c->u.eh_catch.type_list == NULL)
1973 emit_jump (c->label);
1974 else
1975 {
1976 /* We need one cmp/jump per type caught. Each type
1977 list entry has a matching entry in the filter list
1978 (see assign_filter_values). */
1979 tree tp_node = c->u.eh_catch.type_list;
1980 tree flt_node = c->u.eh_catch.filter_list;
1981
1982 for (; tp_node; )
1983 {
1984 emit_cmp_and_jump_insns
1985 (crtl->eh.filter,
1986 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1987 EQ, NULL_RTX,
1988 targetm.eh_return_filter_mode (), 0, c->label);
1989
1990 tp_node = TREE_CHAIN (tp_node);
1991 flt_node = TREE_CHAIN (flt_node);
1992 }
1993 }
1994 }
1995 }
1996
1997 /* We delay the generation of the _Unwind_Resume until we generate
1998 landing pads. We emit a marker here so as to get good control
1999 flow data in the meantime. */
2000 region->resume
2001 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2002 emit_barrier ();
2003
2004 seq = get_insns ();
2005 end_sequence ();
2006
2007 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2008
2009 break;
2010
2011 case ERT_ALLOWED_EXCEPTIONS:
2012 if (!region->label)
2013 break;
2014 region->post_landing_pad = gen_label_rtx ();
2015
2016 start_sequence ();
2017
2018 emit_label (region->post_landing_pad);
2019
2020 emit_cmp_and_jump_insns (crtl->eh.filter,
2021 GEN_INT (region->u.allowed.filter),
2022 EQ, NULL_RTX,
2023 targetm.eh_return_filter_mode (), 0, region->label);
2024
2025 /* We delay the generation of the _Unwind_Resume until we generate
2026 landing pads. We emit a marker here so as to get good control
2027 flow data in the meantime. */
2028 region->resume
2029 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2030 emit_barrier ();
2031
2032 seq = get_insns ();
2033 end_sequence ();
2034
2035 emit_to_new_bb_before (seq, region->label);
2036 break;
2037
2038 case ERT_CLEANUP:
2039 case ERT_MUST_NOT_THROW:
2040 region->post_landing_pad = region->label;
2041 break;
2042
2043 case ERT_CATCH:
2044 case ERT_THROW:
2045 /* Nothing to do. */
2046 break;
2047
2048 default:
2049 gcc_unreachable ();
2050 }
2051 }
2052 }
2053
2054 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2055 _Unwind_Resume otherwise. */
2056
2057 static void
2058 connect_post_landing_pads (void)
2059 {
2060 int i;
2061
2062 for (i = cfun->eh->last_region_number; i > 0; --i)
2063 {
2064 struct eh_region *region;
2065 struct eh_region *outer;
2066 rtx seq;
2067 rtx barrier;
2068
2069 region = VEC_index (eh_region, cfun->eh->region_array, i);
2070 /* Mind we don't process a region more than once. */
2071 if (!region || region->region_number != i)
2072 continue;
2073
2074 /* If there is no RESX, or it has been deleted by flow, there's
2075 nothing to fix up. */
2076 if (! region->resume || INSN_DELETED_P (region->resume))
2077 continue;
2078
2079 /* Search for another landing pad in this function. */
2080 for (outer = region->outer; outer ; outer = outer->outer)
2081 if (outer->post_landing_pad)
2082 break;
2083
2084 start_sequence ();
2085
2086 if (outer)
2087 {
2088 edge e;
2089 basic_block src, dest;
2090
2091 emit_jump (outer->post_landing_pad);
2092 src = BLOCK_FOR_INSN (region->resume);
2093 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2094 while (EDGE_COUNT (src->succs) > 0)
2095 remove_edge (EDGE_SUCC (src, 0));
2096 e = make_edge (src, dest, 0);
2097 e->probability = REG_BR_PROB_BASE;
2098 e->count = src->count;
2099 }
2100 else
2101 {
2102 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2103 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2104
2105 /* What we just emitted was a throwing libcall, so it got a
2106 barrier automatically added after it. If the last insn in
2107 the libcall sequence isn't the barrier, it's because the
2108 target emits multiple insns for a call, and there are insns
2109 after the actual call insn (which are redundant and would be
2110 optimized away). The barrier is inserted exactly after the
2111 call insn, so let's go get that and delete the insns after
2112 it, because below we need the barrier to be the last insn in
2113 the sequence. */
2114 delete_insns_since (NEXT_INSN (last_call_insn ()));
2115 }
2116
2117 seq = get_insns ();
2118 end_sequence ();
2119 barrier = emit_insn_before (seq, region->resume);
2120 /* Avoid duplicate barrier. */
2121 gcc_assert (BARRIER_P (barrier));
2122 delete_insn (barrier);
2123 delete_insn (region->resume);
2124
2125 /* ??? From tree-ssa we can wind up with catch regions whose
2126 label is not instantiated, but whose resx is present. Now
2127 that we've dealt with the resx, kill the region. */
2128 if (region->label == NULL && region->type == ERT_CLEANUP)
2129 remove_eh_handler (region);
2130 }
2131 }
2132
2133 \f
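/* Emit the dwarf2 landing pads. For each region that needs one, emit
   the landing-pad label, copy the exception pointer and filter value
   out of the EH return data registers, and fall through to the
   region's post-landing pad.  */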
2134 static void
2135 dw2_build_landing_pads (void)
2136 {
2137 int i;
2138
2139 for (i = cfun->eh->last_region_number; i > 0; --i)
2140 {
2141 struct eh_region *region;
2142 rtx seq;
2143 basic_block bb;
2144 edge e;
2145
2146 region = VEC_index (eh_region, cfun->eh->region_array, i);
2147 /* Mind we don't process a region more than once. */
2148 if (!region || region->region_number != i)
2149 continue;
2150
2151 if (region->type != ERT_CLEANUP
2152 && region->type != ERT_TRY
2153 && region->type != ERT_ALLOWED_EXCEPTIONS)
2154 continue;
2155
2156 if (!region->post_landing_pad)
2157 continue;
2158
2159 start_sequence ();
2160
2161 region->landing_pad = gen_label_rtx ();
2162 emit_label (region->landing_pad);
2163
2164 #ifdef HAVE_exception_receiver
2165 if (HAVE_exception_receiver)
2166 emit_insn (gen_exception_receiver ());
2167 else
2168 #endif
2169 #ifdef HAVE_nonlocal_goto_receiver
2170 if (HAVE_nonlocal_goto_receiver)
2171 emit_insn (gen_nonlocal_goto_receiver ());
2172 else
2173 #endif
2174 { /* Nothing */ }
2175
2176 emit_move_insn (crtl->eh.exc_ptr,
2177 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2178 emit_move_insn (crtl->eh.filter,
2179 gen_rtx_REG (targetm.eh_return_filter_mode (),
2180 EH_RETURN_DATA_REGNO (1)));
2181
2182 seq = get_insns ();
2183 end_sequence ();
2184
2185 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2186 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2187 e->count = bb->count;
2188 e->probability = REG_BR_PROB_BASE;
2189 }
2190 }
2191
2192 \f
2193 struct sjlj_lp_info
2194 {
2195 int directly_reachable;
2196 int action_index;
2197 int dispatch_index;
2198 int call_site_index;
2199 };
2200
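/* Walk all insns bearing a positive REG_EH_REGION note and record in
   LP_INFO which regions might actually catch an exception, i.e. which
   landing pads are directly reachable. Return true if any were found.  */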
2201 static bool
2202 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2203 {
2204 rtx insn;
2205 bool found_one = false;
2206
2207 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2208 {
2209 struct eh_region *region;
2210 enum reachable_code rc;
2211 tree type_thrown;
2212 rtx note;
2213
2214 if (! INSN_P (insn))
2215 continue;
2216
2217 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2218 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2219 continue;
2220
2221 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2222 if (!region)
2223 continue;
2224
2225 type_thrown = NULL_TREE;
2226 if (region->type == ERT_THROW)
2227 {
2228 type_thrown = region->u.eh_throw.type;
2229 region = region->outer;
2230 }
2231
2232 /* Find the first containing region that might handle the exception.
2233 That's the landing pad to which we will transfer control. */
2234 rc = RNL_NOT_CAUGHT;
2235 for (; region; region = region->outer)
2236 {
2237 rc = reachable_next_level (region, type_thrown, NULL, false);
2238 if (rc != RNL_NOT_CAUGHT)
2239 break;
2240 }
2241 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2242 {
2243 lp_info[region->region_number].directly_reachable = 1;
2244 found_one = true;
2245 }
2246 }
2247
2248 return found_one;
2249 }
2250
2251 static void
2252 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2253 {
2254 htab_t ar_hash;
2255 int i, index;
2256
2257 /* First task: build the action table. */
2258
2259 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2260 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2261
2262 for (i = cfun->eh->last_region_number; i > 0; --i)
2263 if (lp_info[i].directly_reachable)
2264 {
2265 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2266
2267 r->landing_pad = dispatch_label;
2268 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2269 if (lp_info[i].action_index != -1)
2270 crtl->uses_eh_lsda = 1;
2271 }
2272
2273 htab_delete (ar_hash);
2274
2275 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2276 landing pad label for the region. For sjlj though, there is one
2277 common landing pad from which we dispatch to the post-landing pads.
2278
2279 A region receives a dispatch index if it is directly reachable
2280 and requires in-function processing. Regions that share post-landing
2281 pads may share dispatch indices. */
2282 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2283 (see build_post_landing_pads) so we don't bother checking for it. */
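   /* For instance, if regions 9, 5 and 2 are the directly reachable
      ones, the loop below (scanning from the highest region number
      down) assigns them dispatch indices 0, 1 and 2 respectively.  */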
2284
2285 index = 0;
2286 for (i = cfun->eh->last_region_number; i > 0; --i)
2287 if (lp_info[i].directly_reachable)
2288 lp_info[i].dispatch_index = index++;
2289
2290 /* Finally: assign call-site values. In dwarf2 terms, this would be
2291 the region number assigned by convert_to_eh_region_ranges, but
2292 handles no-action and must-not-throw differently. */
2293
2294 call_site_base = 1;
2295 for (i = cfun->eh->last_region_number; i > 0; --i)
2296 if (lp_info[i].directly_reachable)
2297 {
2298 int action = lp_info[i].action_index;
2299
2300 /* Map must-not-throw to otherwise unused call-site index 0. */
2301 if (action == -2)
2302 index = 0;
2303 /* Map no-action to otherwise unused call-site index -1. */
2304 else if (action == -1)
2305 index = -1;
2306 /* Otherwise, look it up in the table. */
2307 else
2308 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2309
2310 lp_info[i].call_site_index = index;
2311 }
2312 }
2313
2314 static void
2315 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2316 {
2317 int last_call_site = -2;
2318 rtx insn, mem;
2319
2320 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2321 {
2322 struct eh_region *region;
2323 int this_call_site;
2324 rtx note, before, p;
2325
2326 /* Reset value tracking at extended basic block boundaries. */
2327 if (LABEL_P (insn))
2328 last_call_site = -2;
2329
2330 if (! INSN_P (insn))
2331 continue;
2332
2333 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2334
2335 /* Calls that are known to not throw need not be marked. */
2336 if (note && INTVAL (XEXP (note, 0)) <= 0)
2337 continue;
2338
2339 if (note)
2340 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2341 else
2342 region = NULL;
2343
2344 if (!region)
2345 {
2346 /* Calls (and trapping insns) without notes are outside any
2347 exception handling region in this function. Mark them as
2348 no action. */
2349 if (CALL_P (insn)
2350 || (flag_non_call_exceptions
2351 && may_trap_p (PATTERN (insn))))
2352 this_call_site = -1;
2353 else
2354 continue;
2355 }
2356 else
2357 this_call_site = lp_info[region->region_number].call_site_index;
2358
2359 if (this_call_site == last_call_site)
2360 continue;
2361
2362 /* Don't separate a call from its argument loads. */
2363 before = insn;
2364 if (CALL_P (insn))
2365 before = find_first_parameter_load (insn, NULL_RTX);
2366
2367 start_sequence ();
2368 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2369 sjlj_fc_call_site_ofs);
2370 emit_move_insn (mem, GEN_INT (this_call_site));
2371 p = get_insns ();
2372 end_sequence ();
2373
2374 emit_insn_before (p, before);
2375 last_call_site = this_call_site;
2376 }
2377 }
2378
2379 /* Construct the SjLj_Function_Context. */
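/* The function context we populate below is laid out roughly as
   follows (see unwind-sjlj.c in libgcc; the sjlj_fc_*_ofs values are
   byte offsets into it, and the exact jbuf representation is
   target-dependent):

       struct SjLj_Function_Context {
	 struct SjLj_Function_Context *prev;  maintained by the runtime
	 int call_site;                       sjlj_fc_call_site_ofs
	 _Unwind_Word data[4];                sjlj_fc_data_ofs
	 _Unwind_Personality_Fn personality;  sjlj_fc_personality_ofs
	 void *lsda;                          sjlj_fc_lsda_ofs
	 (jmp_buf storage)                    sjlj_fc_jbuf_ofs
       };  */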
2380
2381 static void
2382 sjlj_emit_function_enter (rtx dispatch_label)
2383 {
2384 rtx fn_begin, fc, mem, seq;
2385 bool fn_begin_outside_block;
2386
2387 fc = crtl->eh.sjlj_fc;
2388
2389 start_sequence ();
2390
2391 /* We're storing this libcall's address into memory instead of
2392 calling it directly. Thus, we must call assemble_external_libcall
2393 here, as we cannot depend on emit_library_call to do it for us. */
2394 assemble_external_libcall (eh_personality_libfunc);
2395 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2396 emit_move_insn (mem, eh_personality_libfunc);
2397
2398 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2399 if (crtl->uses_eh_lsda)
2400 {
2401 char buf[20];
2402 rtx sym;
2403
2404 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2405 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2406 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2407 emit_move_insn (mem, sym);
2408 }
2409 else
2410 emit_move_insn (mem, const0_rtx);
2411
2412 #ifdef DONT_USE_BUILTIN_SETJMP
2413 {
2414 rtx x;
2415 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2416 TYPE_MODE (integer_type_node), 1,
2417 plus_constant (XEXP (fc, 0),
2418 sjlj_fc_jbuf_ofs), Pmode);
2419
2420 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2421 TYPE_MODE (integer_type_node), 0, dispatch_label);
2422 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2423 }
2424 #else
2425 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2426 dispatch_label);
2427 #endif
2428
2429 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2430 1, XEXP (fc, 0), Pmode);
2431
2432 seq = get_insns ();
2433 end_sequence ();
2434
2435 /* ??? Instead of doing this at the beginning of the function,
2436 do this in a block that is at loop level 0 and dominates all
2437 can_throw_internal instructions. */
2438
2439 fn_begin_outside_block = true;
2440 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2441 if (NOTE_P (fn_begin))
2442 {
2443 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2444 break;
2445 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2446 fn_begin_outside_block = false;
2447 }
2448
2449 if (fn_begin_outside_block)
2450 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2451 else
2452 emit_insn_after (seq, fn_begin);
2453 }
2454
2455 /* Call back from expand_function_end to know where we should put
2456 the call to unwind_sjlj_unregister_libfunc if needed. */
2457
2458 void
2459 sjlj_emit_function_exit_after (rtx after)
2460 {
2461 crtl->eh.sjlj_exit_after = after;
2462 }
2463
2464 static void
2465 sjlj_emit_function_exit (void)
2466 {
2467 rtx seq, insn;
2468
2469 start_sequence ();
2470
2471 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2472 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2473
2474 seq = get_insns ();
2475 end_sequence ();
2476
2477 /* ??? Really this can be done in any block at loop level 0 that
2478 post-dominates all can_throw_internal instructions. This is
2479 the last possible moment. */
2480
2481 insn = crtl->eh.sjlj_exit_after;
2482 if (LABEL_P (insn))
2483 insn = NEXT_INSN (insn);
2484
2485 emit_insn_after (seq, insn);
2486 }
2487
2488 static void
2489 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2490 {
2491 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2492 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2493 int i, first_reachable;
2494 rtx mem, dispatch, seq, fc;
2495 rtx before;
2496 basic_block bb;
2497 edge e;
2498
2499 fc = crtl->eh.sjlj_fc;
2500
2501 start_sequence ();
2502
2503 emit_label (dispatch_label);
2504
2505 #ifndef DONT_USE_BUILTIN_SETJMP
2506 expand_builtin_setjmp_receiver (dispatch_label);
2507 #endif
2508
2509 /* Load up dispatch index, exc_ptr and filter values from the
2510 function context. */
2511 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2512 sjlj_fc_call_site_ofs);
2513 dispatch = copy_to_reg (mem);
2514
2515 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2516 if (unwind_word_mode != ptr_mode)
2517 {
2518 #ifdef POINTERS_EXTEND_UNSIGNED
2519 mem = convert_memory_address (ptr_mode, mem);
2520 #else
2521 mem = convert_to_mode (ptr_mode, mem, 0);
2522 #endif
2523 }
2524 emit_move_insn (crtl->eh.exc_ptr, mem);
2525
2526 mem = adjust_address (fc, unwind_word_mode,
2527 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2528 if (unwind_word_mode != filter_mode)
2529 mem = convert_to_mode (filter_mode, mem, 0);
2530 emit_move_insn (crtl->eh.filter, mem);
2531
2532 /* Jump to one of the directly reachable regions. */
2533 /* ??? This really ought to be using a switch statement. */
2534
2535 first_reachable = 0;
2536 for (i = cfun->eh->last_region_number; i > 0; --i)
2537 {
2538 if (! lp_info[i].directly_reachable)
2539 continue;
2540
2541 if (! first_reachable)
2542 {
2543 first_reachable = i;
2544 continue;
2545 }
2546
2547 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2548 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2549 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2550 ->post_landing_pad);
2551 }
2552
2553 seq = get_insns ();
2554 end_sequence ();
2555
2556 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2557 ->post_landing_pad);
2558
2559 bb = emit_to_new_bb_before (seq, before);
2560 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2561 e->count = bb->count;
2562 e->probability = REG_BR_PROB_BASE;
2563 }
2564
2565 static void
2566 sjlj_build_landing_pads (void)
2567 {
2568 struct sjlj_lp_info *lp_info;
2569
2570 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2571
2572 if (sjlj_find_directly_reachable_regions (lp_info))
2573 {
2574 rtx dispatch_label = gen_label_rtx ();
2575 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2576 TYPE_MODE (sjlj_fc_type_node),
2577 TYPE_ALIGN (sjlj_fc_type_node));
2578 crtl->eh.sjlj_fc
2579 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2580 int_size_in_bytes (sjlj_fc_type_node),
2581 align);
2582
2583 sjlj_assign_call_site_values (dispatch_label, lp_info);
2584 sjlj_mark_call_sites (lp_info);
2585
2586 sjlj_emit_function_enter (dispatch_label);
2587 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2588 sjlj_emit_function_exit ();
2589 }
2590
2591 free (lp_info);
2592 }
2593
2594 /* After initial rtl generation, call back to finish generating
2595 exception support code. */
2596
2597 static void
2598 finish_eh_generation (void)
2599 {
2600 basic_block bb;
2601
2602 /* Nothing to do if no regions created. */
2603 if (cfun->eh->region_tree == NULL)
2604 return;
2605
2606 /* The object here is to provide detailed information (via
2607 reachable_handlers) on how exception control flows within the
2608 function for the CFG construction. In this first pass, we can
2609 include type information garnered from ERT_THROW and
2610 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2611 in deleting unreachable handlers. Subsequently, we will generate
2612 landing pads which will connect many of the handlers, and then
2613 type information will not be effective. Still, this is a win
2614 over previous implementations. */
2615
2616 /* These registers are used by the landing pads. Make sure they
2617 have been generated. */
2618 get_exception_pointer ();
2619 get_exception_filter ();
2620
2621 /* Construct the landing pads. */
2622
2623 assign_filter_values ();
2624 build_post_landing_pads ();
2625 connect_post_landing_pads ();
2626 if (USING_SJLJ_EXCEPTIONS)
2627 sjlj_build_landing_pads ();
2628 else
2629 dw2_build_landing_pads ();
2630
2631 crtl->eh.built_landing_pads = 1;
2632
2633 /* We've totally changed the CFG. Start over. */
2634 find_exception_handler_labels ();
2635 break_superblocks ();
2636 if (USING_SJLJ_EXCEPTIONS
2637 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2638 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2639 commit_edge_insertions ();
2640 FOR_EACH_BB (bb)
2641 {
2642 edge e;
2643 edge_iterator ei;
2644 bool eh = false;
2645 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2646 {
2647 if (e->flags & EDGE_EH)
2648 {
2649 remove_edge (e);
2650 eh = true;
2651 }
2652 else
2653 ei_next (&ei);
2654 }
2655 if (eh)
2656 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2657 }
2658 }
2659 \f
2660 /* This section handles removing dead code for flow. */
2661
2662 /* Splice REGION from the region tree and replace it by REPLACE etc.
2663 When UPDATE_CATCH_TRY is true, also update the links from catch to
2664 try regions. */
2665
2666 static void
2667 remove_eh_handler_and_replace (struct eh_region *region,
2668 struct eh_region *replace,
2669 bool update_catch_try)
2670 {
2671 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2672 rtx lab;
2673
2674 outer = region->outer;
2675
2676 /* When we are moving the region in EH tree, update prev_try pointers. */
2677 if (outer != replace && region->inner)
2678 {
2679 struct eh_region *prev_try = find_prev_try (replace);
2680 p = region->inner;
2681 while (p != region)
2682 {
2683 if (p->type == ERT_CLEANUP)
2684 p->u.cleanup.prev_try = prev_try;
2685 if (p->type != ERT_TRY
2686 && p->type != ERT_MUST_NOT_THROW
2687 && (p->type != ERT_ALLOWED_EXCEPTIONS
2688 || p->u.allowed.type_list)
2689 && p->inner)
2690 p = p->inner;
2691 else if (p->next_peer)
2692 p = p->next_peer;
2693 else
2694 {
2695 while (p != region && !p->next_peer)
2696 p = p->outer;
2697 if (p != region)
2698 p = p->next_peer;
2699 }
2700 }
2701 }
2702 /* For the benefit of efficiently handling REG_EH_REGION notes,
2703 replace this region in the region array with its replacement
2704 (REPLACE). Note that previous region deletions may result in
2705 multiple copies of this region in the array, so we have a
2706 list of alternate numbers by which we are known. */
2707
2708 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2709 replace);
2710 if (region->aka)
2711 {
2712 unsigned i;
2713 bitmap_iterator bi;
2714
2715 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2716 {
2717 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2718 }
2719 }
2720
2721 if (replace)
2722 {
2723 if (!replace->aka)
2724 replace->aka = BITMAP_GGC_ALLOC ();
2725 if (region->aka)
2726 bitmap_ior_into (replace->aka, region->aka);
2727 bitmap_set_bit (replace->aka, region->region_number);
2728 }
2729
2730 if (crtl->eh.built_landing_pads)
2731 lab = region->landing_pad;
2732 else
2733 lab = region->label;
2734 if (outer)
2735 pp_start = &outer->inner;
2736 else
2737 pp_start = &cfun->eh->region_tree;
2738 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2739 continue;
2740 *pp = region->next_peer;
2741
2742 if (replace)
2743 pp_start = &replace->inner;
2744 else
2745 pp_start = &cfun->eh->region_tree;
2746 inner = region->inner;
2747 if (inner)
2748 {
2749 for (p = inner; p->next_peer ; p = p->next_peer)
2750 p->outer = replace;
2751 p->outer = replace;
2752
2753 p->next_peer = *pp_start;
2754 *pp_start = inner;
2755 }
2756
2757 if (region->type == ERT_CATCH
2758 && update_catch_try)
2759 {
2760 struct eh_region *eh_try, *next, *prev;
2761
2762 for (eh_try = region->next_peer;
2763 eh_try->type == ERT_CATCH;
2764 eh_try = eh_try->next_peer)
2765 continue;
2766 gcc_assert (eh_try->type == ERT_TRY);
2767
2768 next = region->u.eh_catch.next_catch;
2769 prev = region->u.eh_catch.prev_catch;
2770
2771 if (next)
2772 next->u.eh_catch.prev_catch = prev;
2773 else
2774 eh_try->u.eh_try.last_catch = prev;
2775 if (prev)
2776 prev->u.eh_catch.next_catch = next;
2777 else
2778 {
2779 eh_try->u.eh_try.eh_catch = next;
2780 if (! next)
2781 remove_eh_handler (eh_try);
2782 }
2783 }
2784 }
2785
2786 /* Splice REGION from the region tree and replace it by the outer region
2787 etc. */
2788
2789 static void
2790 remove_eh_handler (struct eh_region *region)
2791 {
2792 remove_eh_handler_and_replace (region, region->outer, true);
2793 }
2794
2795 /* Remove EH region R that has turned out to have no code in its handler. */
2796
2797 void
2798 remove_eh_region (int r)
2799 {
2800 struct eh_region *region;
2801
2802 region = VEC_index (eh_region, cfun->eh->region_array, r);
2803 remove_eh_handler (region);
2804 }
2805
2806 /* Remove EH region R that has turned out to have no code in its handler
2807 and replace it by the outer region of R2. */
2808
2809 void
2810 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2811 {
2812 struct eh_region *region, *region2;
2813
2814 region = VEC_index (eh_region, cfun->eh->region_array, r);
2815 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2816 remove_eh_handler_and_replace (region, region2->outer, true);
2817 }
2818
2819 /* Invokes CALLBACK for every exception handler label. Only used by old
2820 loop hackery; should not be used by new code. */
2821
2822 void
2823 for_each_eh_label (void (*callback) (rtx))
2824 {
2825 int i;
2826 for (i = 1; i <= cfun->eh->last_region_number; i++)
2827 {
2828 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2829 if (r && r->region_number == i && r->label
2830 && GET_CODE (r->label) == CODE_LABEL)
2831 (*callback) (r->label);
2832 }
2833 }
2834
2835 /* Invoke CALLBACK for every exception region in the current function. */
2836
2837 void
2838 for_each_eh_region (void (*callback) (struct eh_region *))
2839 {
2840 int i, n = cfun->eh->last_region_number;
2841 for (i = 1; i <= n; ++i)
2842 {
2843 struct eh_region *region;
2844
2845 region = VEC_index (eh_region, cfun->eh->region_array, i);
2846 if (region)
2847 (*callback) (region);
2848 }
2849 }
2850 \f
2851 /* This section describes CFG exception edges for flow. */
2852
2853 /* For communicating between calls to reachable_next_level. */
2854 struct reachable_info
2855 {
2856 tree types_caught;
2857 tree types_allowed;
2858 void (*callback) (struct eh_region *, void *);
2859 void *callback_data;
2860 };
2861
2862 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2863 base class of TYPE, is in HANDLED. */
2864
2865 static int
2866 check_handled (tree handled, tree type)
2867 {
2868 tree t;
2869
2870 /* We can check for exact matches without front-end help. */
2871 if (! lang_eh_type_covers)
2872 {
2873 for (t = handled; t ; t = TREE_CHAIN (t))
2874 if (TREE_VALUE (t) == type)
2875 return 1;
2876 }
2877 else
2878 {
2879 for (t = handled; t ; t = TREE_CHAIN (t))
2880 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2881 return 1;
2882 }
2883
2884 return 0;
2885 }
2886
2887 /* A subroutine of reachable_next_level. If we are collecting a list
2888 of handlers, add one. After landing pad generation, we reference
2889 the landing pad instead of the handlers themselves. Further, the handlers are
2890 all wired together, so by referencing one, we've got them all.
2891 Before landing pad generation we reference each handler individually.
2892
2893 LP_REGION contains the landing pad; REGION is the handler. */
2894
2895 static void
2896 add_reachable_handler (struct reachable_info *info,
2897 struct eh_region *lp_region, struct eh_region *region)
2898 {
2899 if (! info)
2900 return;
2901
2902 if (crtl->eh.built_landing_pads)
2903 info->callback (lp_region, info->callback_data);
2904 else
2905 info->callback (region, info->callback_data);
2906 }
2907
2908 /* Process one level of exception regions for reachability.
2909 If TYPE_THROWN is non-null, then it is the *exact* type being
2910 propagated. If INFO is non-null, then collect handler labels
2911 and caught/allowed type information between invocations. */
2912
2913 static enum reachable_code
2914 reachable_next_level (struct eh_region *region, tree type_thrown,
2915 struct reachable_info *info,
2916 bool maybe_resx)
2917 {
2918 switch (region->type)
2919 {
2920 case ERT_CLEANUP:
2921 /* Before landing-pad generation, we model control flow
2922 directly to the individual handlers. In this way we can
2923 see that catch handler types may shadow one another. */
2924 add_reachable_handler (info, region, region);
2925 return RNL_MAYBE_CAUGHT;
2926
2927 case ERT_TRY:
2928 {
2929 struct eh_region *c;
2930 enum reachable_code ret = RNL_NOT_CAUGHT;
2931
2932 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2933 {
2934 /* A catch-all handler ends the search. */
2935 if (c->u.eh_catch.type_list == NULL)
2936 {
2937 add_reachable_handler (info, region, c);
2938 return RNL_CAUGHT;
2939 }
2940
2941 if (type_thrown)
2942 {
2943 /* If we have at least one type match, end the search. */
2944 tree tp_node = c->u.eh_catch.type_list;
2945
2946 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2947 {
2948 tree type = TREE_VALUE (tp_node);
2949
2950 if (type == type_thrown
2951 || (lang_eh_type_covers
2952 && (*lang_eh_type_covers) (type, type_thrown)))
2953 {
2954 add_reachable_handler (info, region, c);
2955 return RNL_CAUGHT;
2956 }
2957 }
2958
2959 /* If we have definitive information of a match failure,
2960 the catch won't trigger. */
2961 if (lang_eh_type_covers)
2962 return RNL_NOT_CAUGHT;
2963 }
2964
2965 /* At this point, we either don't know what type is thrown or
2966 don't have front-end assistance to help deciding if it is
2967 covered by one of the types in the list for this region.
2968
2969 We'd then like to add this region to the list of reachable
2970 handlers since it is indeed potentially reachable based on the
2971 information we have.
2972
2973 Actually, this handler is for sure not reachable if all the
2974 types it matches have already been caught. That is, it is only
2975 potentially reachable if at least one of the types it catches
2976 has not been previously caught. */
2977
2978 if (! info)
2979 ret = RNL_MAYBE_CAUGHT;
2980 else
2981 {
2982 tree tp_node = c->u.eh_catch.type_list;
2983 bool maybe_reachable = false;
2984
2985 /* Compute the potential reachability of this handler and
2986 update the list of types caught at the same time. */
2987 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2988 {
2989 tree type = TREE_VALUE (tp_node);
2990
2991 if (! check_handled (info->types_caught, type))
2992 {
2993 info->types_caught
2994 = tree_cons (NULL, type, info->types_caught);
2995
2996 maybe_reachable = true;
2997 }
2998 }
2999
3000 if (maybe_reachable)
3001 {
3002 add_reachable_handler (info, region, c);
3003
3004 /* ??? If the catch type is a base class of every allowed
3005 type, then we know we can stop the search. */
3006 ret = RNL_MAYBE_CAUGHT;
3007 }
3008 }
3009 }
3010
3011 return ret;
3012 }
3013
3014 case ERT_ALLOWED_EXCEPTIONS:
3015 /* An empty list of types definitely ends the search. */
3016 if (region->u.allowed.type_list == NULL_TREE)
3017 {
3018 add_reachable_handler (info, region, region);
3019 return RNL_CAUGHT;
3020 }
3021
3022 /* Collect a list of lists of allowed types for use in detecting
3023 when a catch may be transformed into a catch-all. */
3024 if (info)
3025 info->types_allowed = tree_cons (NULL_TREE,
3026 region->u.allowed.type_list,
3027 info->types_allowed);
3028
3029 /* If we have definitive information about the type hierarchy,
3030 then we can tell if the thrown type will pass through the
3031 filter. */
3032 if (type_thrown && lang_eh_type_covers)
3033 {
3034 if (check_handled (region->u.allowed.type_list, type_thrown))
3035 return RNL_NOT_CAUGHT;
3036 else
3037 {
3038 add_reachable_handler (info, region, region);
3039 return RNL_CAUGHT;
3040 }
3041 }
3042
3043 add_reachable_handler (info, region, region);
3044 return RNL_MAYBE_CAUGHT;
3045
3046 case ERT_CATCH:
3047 /* Catch regions are handled by their controlling try region. */
3048 return RNL_NOT_CAUGHT;
3049
3050 case ERT_MUST_NOT_THROW:
3051 /* Here we end our search, since no exceptions may propagate.
3052
3053 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3054 only via locally handled RESX instructions.
3055
3056 When we inline a function call, we can bring in new handlers. To
3057 prevent ERT_MUST_NOT_THROW landing pads from being deleted as
3058 unreachable, assume that such handlers exist for any inlinable
3059 call until inlining decisions are fixed. */
3060
3061 if (maybe_resx)
3062 {
3063 add_reachable_handler (info, region, region);
3064 return RNL_CAUGHT;
3065 }
3066 else
3067 return RNL_BLOCKED;
3068
3069 case ERT_THROW:
3070 case ERT_UNKNOWN:
3071 /* Shouldn't see these here. */
3072 gcc_unreachable ();
3073 break;
3074 default:
3075 gcc_unreachable ();
3076 }
3077 }
3078
3079 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3080
3081 void
3082 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3083 void (*callback) (struct eh_region *, void *),
3084 void *callback_data)
3085 {
3086 struct reachable_info info;
3087 struct eh_region *region;
3088 tree type_thrown;
3089
3090 memset (&info, 0, sizeof (info));
3091 info.callback = callback;
3092 info.callback_data = callback_data;
3093
3094 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3095 if (!region)
3096 return;
3097
3098 type_thrown = NULL_TREE;
3099 if (is_resx)
3100 {
3101 /* A RESX leaves a region instead of entering it. Thus the
3102 region itself may have been deleted out from under us. */
3103 if (region == NULL)
3104 return;
3105 region = region->outer;
3106 }
3107 else if (region->type == ERT_THROW)
3108 {
3109 type_thrown = region->u.eh_throw.type;
3110 region = region->outer;
3111 }
3112
3113 while (region)
3114 {
3115 if (reachable_next_level (region, type_thrown, &info,
3116 inlinable_call || is_resx) >= RNL_CAUGHT)
3117 break;
3118 /* If we have processed one cleanup, there is no point in
3119 processing any more of them. Each cleanup will have an edge
3120 to the next outer cleanup region, so the flow graph will be
3121 accurate. */
3122 if (region->type == ERT_CLEANUP)
3123 region = region->u.cleanup.prev_try;
3124 else
3125 region = region->outer;
3126 }
3127 }
3128
3129 /* Retrieve a list of labels of exception handlers which can be
3130 reached by a given insn. */
3131
3132 static void
3133 arh_to_landing_pad (struct eh_region *region, void *data)
3134 {
3135 rtx *p_handlers = (rtx *) data;
3136 if (! *p_handlers)
3137 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3138 }
3139
3140 static void
3141 arh_to_label (struct eh_region *region, void *data)
3142 {
3143 rtx *p_handlers = (rtx *) data;
3144 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3145 }
3146
3147 rtx
3148 reachable_handlers (rtx insn)
3149 {
3150 bool is_resx = false;
3151 rtx handlers = NULL;
3152 int region_number;
3153
3154 if (JUMP_P (insn)
3155 && GET_CODE (PATTERN (insn)) == RESX)
3156 {
3157 region_number = XINT (PATTERN (insn), 0);
3158 is_resx = true;
3159 }
3160 else
3161 {
3162 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3163 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3164 return NULL;
3165 region_number = INTVAL (XEXP (note, 0));
3166 }
3167
3168 foreach_reachable_handler (region_number, is_resx, false,
3169 (crtl->eh.built_landing_pads
3170 ? arh_to_landing_pad
3171 : arh_to_label),
3172 &handlers);
3173
3174 return handlers;
3175 }
3176
3177 /* Determine if the given INSN can throw an exception that is caught
3178 within the function. */
3179
3180 bool
3181 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3182 {
3183 struct eh_region *region;
3184 tree type_thrown;
3185
3186 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3187 if (!region)
3188 return false;
3189
3190 type_thrown = NULL_TREE;
3191 if (is_resx)
3192 region = region->outer;
3193 else if (region->type == ERT_THROW)
3194 {
3195 type_thrown = region->u.eh_throw.type;
3196 region = region->outer;
3197 }
3198
3199 /* If this exception is ignored by each and every containing region,
3200 then control passes straight out. The runtime may handle some
3201 regions, which also do not require processing internally. */
3202 for (; region; region = region->outer)
3203 {
3204 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3205 inlinable_call || is_resx);
3206 if (how == RNL_BLOCKED)
3207 return false;
3208 if (how != RNL_NOT_CAUGHT)
3209 return true;
3210 }
3211
3212 return false;
3213 }
3214
3215 bool
3216 can_throw_internal (const_rtx insn)
3217 {
3218 rtx note;
3219
3220 if (! INSN_P (insn))
3221 return false;
3222
3223 if (JUMP_P (insn)
3224 && GET_CODE (PATTERN (insn)) == RESX
3225 && XINT (PATTERN (insn), 0) > 0)
3226 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3227
3228 if (NONJUMP_INSN_P (insn)
3229 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3230 insn = XVECEXP (PATTERN (insn), 0, 0);
3231
3232 /* Every insn that might throw has an EH_REGION note. */
3233 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3234 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3235 return false;
3236
3237 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3238 }
3239
3240 /* Determine if the given INSN can throw an exception that is
3241 visible outside the function. */
3242
3243 bool
3244 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3245 {
3246 struct eh_region *region;
3247 tree type_thrown;
3248
3249 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3250 if (!region)
3251 return true;
3252
3253 type_thrown = NULL_TREE;
3254 if (is_resx)
3255 region = region->outer;
3256 else if (region->type == ERT_THROW)
3257 {
3258 type_thrown = region->u.eh_throw.type;
3259 region = region->outer;
3260 }
3261
3262 /* If the exception is caught or blocked by any containing region,
3263 then it is not seen by any calling function. */
3264 for (; region ; region = region->outer)
3265 if (reachable_next_level (region, type_thrown, NULL,
3266 inlinable_call || is_resx) >= RNL_CAUGHT)
3267 return false;
3268
3269 return true;
3270 }
3271
3272 bool
3273 can_throw_external (const_rtx insn)
3274 {
3275 rtx note;
3276
3277 if (! INSN_P (insn))
3278 return false;
3279
3280 if (JUMP_P (insn)
3281 && GET_CODE (PATTERN (insn)) == RESX
3282 && XINT (PATTERN (insn), 0) > 0)
3283 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3284
3285 if (NONJUMP_INSN_P (insn)
3286 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3287 {
3288 rtx seq = PATTERN (insn);
3289 int i, n = XVECLEN (seq, 0);
3290
3291 for (i = 0; i < n; i++)
3292 if (can_throw_external (XVECEXP (seq, 0, i)))
3293 return true;
3294
3295 return false;
3296 }
3297
3298 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3299 if (!note)
3300 {
3301 /* Calls (and trapping insns) without notes are outside any
3302 exception handling region in this function. We have to
3303 assume it might throw. Given that the front end and middle
3304 ends mark known NOTHROW functions, this isn't so wildly
3305 inaccurate. */
3306 return (CALL_P (insn)
3307 || (flag_non_call_exceptions
3308 && may_trap_p (PATTERN (insn))));
3309 }
3310 if (INTVAL (XEXP (note, 0)) <= 0)
3311 return false;
3312
3313 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3314 }
3315
3316 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3317
3318 unsigned int
3319 set_nothrow_function_flags (void)
3320 {
3321 rtx insn;
3322
3323 crtl->nothrow = 1;
3324
3325 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3326 something that can throw an exception. We specifically exempt
3327 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3328 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3329 is optimistic. */
3330
3331 crtl->all_throwers_are_sibcalls = 1;
3332
3333 /* If we don't know that this implementation of the function will
3334 actually be used, then we must not set TREE_NOTHROW, since
3335 callers must not assume that this function does not throw. */
3336 if (TREE_NOTHROW (current_function_decl))
3337 return 0;
3338
3339 if (! flag_exceptions)
3340 return 0;
3341
3342 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3343 if (can_throw_external (insn))
3344 {
3345 crtl->nothrow = 0;
3346
3347 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3348 {
3349 crtl->all_throwers_are_sibcalls = 0;
3350 return 0;
3351 }
3352 }
3353
3354 for (insn = crtl->epilogue_delay_list; insn;
3355 insn = XEXP (insn, 1))
3356 if (can_throw_external (insn))
3357 {
3358 crtl->nothrow = 0;
3359
3360 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3361 {
3362 crtl->all_throwers_are_sibcalls = 0;
3363 return 0;
3364 }
3365 }
3366 if (crtl->nothrow
3367 && (cgraph_function_body_availability (cgraph_node
3368 (current_function_decl))
3369 >= AVAIL_AVAILABLE))
3370 {
3371 struct cgraph_node *node = cgraph_node (current_function_decl);
3372 struct cgraph_edge *e;
3373 for (e = node->callers; e; e = e->next_caller)
3374 e->can_throw_external = false;
3375 TREE_NOTHROW (current_function_decl) = 1;
3376
3377 if (dump_file)
3378 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3379 current_function_name ());
3380 }
3381 return 0;
3382 }
3383
3384 struct rtl_opt_pass pass_set_nothrow_function_flags =
3385 {
3386 {
3387 RTL_PASS,
3388 "nothrow", /* name */
3389 NULL, /* gate */
3390 set_nothrow_function_flags, /* execute */
3391 NULL, /* sub */
3392 NULL, /* next */
3393 0, /* static_pass_number */
3394 TV_NONE, /* tv_id */
3395 0, /* properties_required */
3396 0, /* properties_provided */
3397 0, /* properties_destroyed */
3398 0, /* todo_flags_start */
3399 TODO_dump_func, /* todo_flags_finish */
3400 }
3401 };
3402
3403 \f
3404 /* Various hooks for unwind library. */
3405
3406 /* Do any necessary initialization to access arbitrary stack frames.
3407 On the SPARC, this means flushing the register windows. */
3408
3409 void
3410 expand_builtin_unwind_init (void)
3411 {
3412 /* Set this so all the registers get saved in our frame; we need to be
3413 able to copy the saved values for any registers from frames we unwind. */
3414 crtl->saves_all_registers = 1;
3415
3416 #ifdef SETUP_FRAME_ADDRESSES
3417 SETUP_FRAME_ADDRESSES ();
3418 #endif
3419 }
3420
3421 rtx
3422 expand_builtin_eh_return_data_regno (tree exp)
3423 {
3424 tree which = CALL_EXPR_ARG (exp, 0);
3425 unsigned HOST_WIDE_INT iwhich;
3426
3427 if (TREE_CODE (which) != INTEGER_CST)
3428 {
3429 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3430 return constm1_rtx;
3431 }
3432
3433 iwhich = tree_low_cst (which, 1);
3434 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3435 if (iwhich == INVALID_REGNUM)
3436 return constm1_rtx;
3437
3438 #ifdef DWARF_FRAME_REGNUM
3439 iwhich = DWARF_FRAME_REGNUM (iwhich);
3440 #else
3441 iwhich = DBX_REGISTER_NUMBER (iwhich);
3442 #endif
3443
3444 return GEN_INT (iwhich);
3445 }
3446
3447 /* Given a value extracted from the return address register or stack slot,
3448 return the actual address encoded in that value. */
3449
3450 rtx
3451 expand_builtin_extract_return_addr (tree addr_tree)
3452 {
3453 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3454
3455 if (GET_MODE (addr) != Pmode
3456 && GET_MODE (addr) != VOIDmode)
3457 {
3458 #ifdef POINTERS_EXTEND_UNSIGNED
3459 addr = convert_memory_address (Pmode, addr);
3460 #else
3461 addr = convert_to_mode (Pmode, addr, 0);
3462 #endif
3463 }
3464
3465 /* First mask out any unwanted bits. */
3466 #ifdef MASK_RETURN_ADDR
3467 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3468 #endif
3469
3470 /* Then adjust to find the real return address. */
3471 #if defined (RETURN_ADDR_OFFSET)
3472 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3473 #endif
3474
3475 return addr;
3476 }
3477
3478 /* Given an actual address in ADDR_TREE, do any necessary encoding
3479 and return the value to be stored in the return address register or
3480 stack slot so the epilogue will return to that address. */
3481
3482 rtx
3483 expand_builtin_frob_return_addr (tree addr_tree)
3484 {
3485 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3486
3487 addr = convert_memory_address (Pmode, addr);
3488
3489 #ifdef RETURN_ADDR_OFFSET
3490 addr = force_reg (Pmode, addr);
3491 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3492 #endif
3493
3494 return addr;
3495 }
3496
3497 /* Set up the epilogue with the magic bits we'll need to return to the
3498 exception handler. */
3499
3500 void
3501 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3502 tree handler_tree)
3503 {
3504 rtx tmp;
3505
3506 #ifdef EH_RETURN_STACKADJ_RTX
3507 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3508 VOIDmode, EXPAND_NORMAL);
3509 tmp = convert_memory_address (Pmode, tmp);
3510 if (!crtl->eh.ehr_stackadj)
3511 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3512 else if (tmp != crtl->eh.ehr_stackadj)
3513 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3514 #endif
3515
3516 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3517 VOIDmode, EXPAND_NORMAL);
3518 tmp = convert_memory_address (Pmode, tmp);
3519 if (!crtl->eh.ehr_handler)
3520 crtl->eh.ehr_handler = copy_to_reg (tmp);
3521 else if (tmp != crtl->eh.ehr_handler)
3522 emit_move_insn (crtl->eh.ehr_handler, tmp);
3523
3524 if (!crtl->eh.ehr_label)
3525 crtl->eh.ehr_label = gen_label_rtx ();
3526 emit_jump (crtl->eh.ehr_label);
3527 }
3528
3529 void
3530 expand_eh_return (void)
3531 {
3532 rtx around_label;
3533
3534 if (! crtl->eh.ehr_label)
3535 return;
3536
3537 crtl->calls_eh_return = 1;
3538
3539 #ifdef EH_RETURN_STACKADJ_RTX
3540 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3541 #endif
3542
3543 around_label = gen_label_rtx ();
3544 emit_jump (around_label);
3545
3546 emit_label (crtl->eh.ehr_label);
3547 clobber_return_register ();
3548
3549 #ifdef EH_RETURN_STACKADJ_RTX
3550 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3551 #endif
3552
3553 #ifdef HAVE_eh_return
3554 if (HAVE_eh_return)
3555 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3556 else
3557 #endif
3558 {
3559 #ifdef EH_RETURN_HANDLER_RTX
3560 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3561 #else
3562 error ("__builtin_eh_return not supported on this target");
3563 #endif
3564 }
3565
3566 emit_label (around_label);
3567 }
3568
3569 /* Convert a ptr_mode address ADDR_TREE to an unwind-word-mode address,
3570 extended as controlled by POINTERS_EXTEND_UNSIGNED, and return it. */
3571
3572 rtx
3573 expand_builtin_extend_pointer (tree addr_tree)
3574 {
3575 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3576 int extend;
3577
3578 #ifdef POINTERS_EXTEND_UNSIGNED
3579 extend = POINTERS_EXTEND_UNSIGNED;
3580 #else
3581 /* The previous EH code did an unsigned extend by default, so we do this also
3582 for consistency. */
3583 extend = 1;
3584 #endif
3585
3586 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3587 }
3588 \f
3589 /* In the following functions, we represent entries in the action table
3590 as 1-based indices. Special cases are:
3591
3592 0: null action record, non-null landing pad; implies cleanups
3593 -1: null action record, null landing pad; implies no action
3594 -2: no call-site entry; implies must_not_throw
3595 -3: we have yet to process outer regions
3596
3597 Further, no special cases apply to the "next" field of the record.
3598 For next, 0 means end of list. */
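/* For example, a call in a try block whose handler is catch (A)
   (filter 1), the whole thing wrapped in an outer cleanup, yields two
   records: a cleanup record (filter 0, next 0) and the catch's record
   (filter 1, next pointing at the cleanup record). The call site then
   refers to the catch's record.  */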
3599
3600 struct action_record
3601 {
3602 int offset;
3603 int filter;
3604 int next;
3605 };
3606
3607 static int
3608 action_record_eq (const void *pentry, const void *pdata)
3609 {
3610 const struct action_record *entry = (const struct action_record *) pentry;
3611 const struct action_record *data = (const struct action_record *) pdata;
3612 return entry->filter == data->filter && entry->next == data->next;
3613 }
3614
3615 static hashval_t
3616 action_record_hash (const void *pentry)
3617 {
3618 const struct action_record *entry = (const struct action_record *) pentry;
3619 return entry->next * 1009 + entry->filter;
3620 }
3621
3622 static int
3623 add_action_record (htab_t ar_hash, int filter, int next)
3624 {
3625 struct action_record **slot, *new_ar, tmp;
3626
3627 tmp.filter = filter;
3628 tmp.next = next;
3629 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3630
3631 if ((new_ar = *slot) == NULL)
3632 {
3633 new_ar = XNEW (struct action_record);
3634 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3635 new_ar->filter = filter;
3636 new_ar->next = next;
3637 *slot = new_ar;
3638
3639 /* The filter value goes in untouched. The link to the next
3640 record is a "self-relative" byte offset, or zero to indicate
3641 that there is no next record. So convert the absolute 1 based
3642 indices we've been carrying around into a displacement. */
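      /* For instance, if this record begins at 1-based index 3 and its
	 "next" refers to the record at index 1, then after the filter
	 (assuming it encodes in one byte) is pushed the buffer holds
	 three bytes, and we store 1 - (3 + 1) = -3.  */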
3643
3644 push_sleb128 (&crtl->eh.action_record_data, filter);
3645 if (next)
3646 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3647 push_sleb128 (&crtl->eh.action_record_data, next);
3648 }
3649
3650 return new_ar->offset;
3651 }
3652
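/* Compute the chain of action records covering a call site in REGION,
   adding any new records to crtl->eh.action_record_data via AR_HASH.
   Return the 1-based offset of the first record in the chain, or one
   of the negative special values documented above.  */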
3653 static int
3654 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3655 {
3656 struct eh_region *c;
3657 int next;
3658
3659 /* If we've reached the top of the region chain, then we have
3660 no actions, and require no landing pad. */
3661 if (region == NULL)
3662 return -1;
3663
3664 switch (region->type)
3665 {
3666 case ERT_CLEANUP:
3667 /* A cleanup adds a zero filter to the beginning of the chain, but
3668 there are special cases to look out for. If there are *only*
3669 cleanups along a path, then it compresses to a zero action.
3670 Further, if there are multiple cleanups along a path, we only
3671 need to represent one of them, as that is enough to trigger
3672 entry to the landing pad at runtime. */
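      /* For example, a call nested only in cleanups compresses to the
	 zero action, and a cleanup nested in another cleanup adds no
	 record of its own: the outer cleanup's entry already suffices.  */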
3673 next = collect_one_action_chain (ar_hash, region->outer);
3674 if (next <= 0)
3675 return 0;
3676 for (c = region->outer; c ; c = c->outer)
3677 if (c->type == ERT_CLEANUP)
3678 return next;
3679 return add_action_record (ar_hash, 0, next);
3680
3681 case ERT_TRY:
3682 /* Process the associated catch regions in reverse order.
3683 If there's a catch-all handler, then we don't need to
3684 search outer regions. Use a magic -3 value to record
3685 that we haven't done the outer search. */
3686 next = -3;
3687 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3688 {
3689 if (c->u.eh_catch.type_list == NULL)
3690 {
3691 /* Retrieve the filter from the head of the filter list
3692 where we have stored it (see assign_filter_values). */
3693 int filter
3694 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3695
3696 next = add_action_record (ar_hash, filter, 0);
3697 }
3698 else
3699 {
3700 /* Once the outer search is done, trigger an action record for
3701 each filter we have. */
3702 tree flt_node;
3703
3704 if (next == -3)
3705 {
3706 next = collect_one_action_chain (ar_hash, region->outer);
3707
3708 /* If there is no next action, terminate the chain. */
3709 if (next == -1)
3710 next = 0;
3711 /* If all outer actions are cleanups or must_not_throw,
3712 we'll have no action record for it, since we had wanted
3713 to encode these states in the call-site record directly.
3714 Add a cleanup action to the chain to catch these. */
3715 else if (next <= 0)
3716 next = add_action_record (ar_hash, 0, 0);
3717 }
3718
3719 flt_node = c->u.eh_catch.filter_list;
3720 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3721 {
3722 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3723 next = add_action_record (ar_hash, filter, next);
3724 }
3725 }
3726 }
3727 return next;
3728
3729 case ERT_ALLOWED_EXCEPTIONS:
3730 /* An exception specification adds its filter to the
3731 beginning of the chain. */
3732 next = collect_one_action_chain (ar_hash, region->outer);
3733
3734 /* If there is no next action, terminate the chain. */
3735 if (next == -1)
3736 next = 0;
3737 /* If all outer actions are cleanups or must_not_throw,
3738 we'll have no action record for it, since we had wanted
3739 to encode these states in the call-site record directly.
3740 Add a cleanup action to the chain to catch these. */
3741 else if (next <= 0)
3742 next = add_action_record (ar_hash, 0, 0);
3743
3744 return add_action_record (ar_hash, region->u.allowed.filter, next);
3745
3746 case ERT_MUST_NOT_THROW:
3747 /* A must-not-throw region with no inner handlers or cleanups
3748 requires no call-site entry. Note that this differs from
3749 the no handler or cleanup case in that we do require an lsda
3750 to be generated. Return a magic -2 value to record this. */
3751 return -2;
3752
3753 case ERT_CATCH:
3754 case ERT_THROW:
3755 /* CATCH regions are handled in TRY above. THROW regions are
3756 for optimization information only and produce no output. */
3757 return collect_one_action_chain (ar_hash, region->outer);
3758
3759 default:
3760 gcc_unreachable ();
3761 }
3762 }
3763
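/* Append a call-site record mapping LANDING_PAD to action index ACTION
   and return its index, biased by call_site_base.  */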
3764 static int
3765 add_call_site (rtx landing_pad, int action)
3766 {
3767 call_site_record record;
3768
3769 record = GGC_NEW (struct call_site_record);
3770 record->landing_pad = landing_pad;
3771 record->action = action;
3772
3773 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3774
3775 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3776 }
3777
3778 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3779 The new note numbers will not refer to region numbers, but
3780 instead to call site entries. */
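/* For example, a run of insns whose actions all resolve to call site 2
   ends up bracketed as

       NOTE_INSN_EH_REGION_BEG  (NOTE_EH_HANDLER == 2)
	 ... insns ...
       NOTE_INSN_EH_REGION_END  (NOTE_EH_HANDLER == 2)

   by the pass below.  */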
3781
3782 unsigned int
3783 convert_to_eh_region_ranges (void)
3784 {
3785 rtx insn, iter, note;
3786 htab_t ar_hash;
3787 int last_action = -3;
3788 rtx last_action_insn = NULL_RTX;
3789 rtx last_landing_pad = NULL_RTX;
3790 rtx first_no_action_insn = NULL_RTX;
3791 int call_site = 0;
3792
3793 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3794 return 0;
3795
3796 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3797
3798 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3799
3800 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3801 if (INSN_P (iter))
3802 {
3803 struct eh_region *region;
3804 int this_action;
3805 rtx this_landing_pad;
3806
3807 insn = iter;
3808 if (NONJUMP_INSN_P (insn)
3809 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3810 insn = XVECEXP (PATTERN (insn), 0, 0);
3811
3812 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3813 if (!note)
3814 {
3815 if (! (CALL_P (insn)
3816 || (flag_non_call_exceptions
3817 && may_trap_p (PATTERN (insn)))))
3818 continue;
3819 this_action = -1;
3820 region = NULL;
3821 }
3822 else
3823 {
3824 if (INTVAL (XEXP (note, 0)) <= 0)
3825 continue;
3826 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3827 this_action = collect_one_action_chain (ar_hash, region);
3828 }
3829
3830 	/* Existence of catch handlers or must-not-throw regions
3831 	   implies that an lsda is needed (even if empty).  */
3832 if (this_action != -1)
3833 crtl->uses_eh_lsda = 1;
3834
3835 /* Delay creation of region notes for no-action regions
3836 until we're sure that an lsda will be required. */
3837 else if (last_action == -3)
3838 {
3839 first_no_action_insn = iter;
3840 last_action = -1;
3841 }
3842
3843 /* Cleanups and handlers may share action chains but not
3844 landing pads. Collect the landing pad for this region. */
3845 if (this_action >= 0)
3846 {
3847 struct eh_region *o;
3848 for (o = region; ! o->landing_pad ; o = o->outer)
3849 continue;
3850 this_landing_pad = o->landing_pad;
3851 }
3852 else
3853 this_landing_pad = NULL_RTX;
3854
3855 	/* Differing actions or landing pads imply a change in call-site
3856 	   info, which implies some EH_REGION note should be emitted.  */
3857 if (last_action != this_action
3858 || last_landing_pad != this_landing_pad)
3859 {
3860 /* If we'd not seen a previous action (-3) or the previous
3861 action was must-not-throw (-2), then we do not need an
3862 end note. */
3863 if (last_action >= -1)
3864 {
3865 /* If we delayed the creation of the begin, do it now. */
3866 if (first_no_action_insn)
3867 {
3868 call_site = add_call_site (NULL_RTX, 0);
3869 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3870 first_no_action_insn);
3871 NOTE_EH_HANDLER (note) = call_site;
3872 first_no_action_insn = NULL_RTX;
3873 }
3874
3875 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3876 last_action_insn);
3877 NOTE_EH_HANDLER (note) = call_site;
3878 }
3879
3880 /* If the new action is must-not-throw, then no region notes
3881 are created. */
3882 if (this_action >= -1)
3883 {
3884 call_site = add_call_site (this_landing_pad,
3885 this_action < 0 ? 0 : this_action);
3886 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3887 NOTE_EH_HANDLER (note) = call_site;
3888 }
3889
3890 last_action = this_action;
3891 last_landing_pad = this_landing_pad;
3892 }
3893 last_action_insn = iter;
3894 }
3895
3896 if (last_action >= -1 && ! first_no_action_insn)
3897 {
3898 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3899 NOTE_EH_HANDLER (note) = call_site;
3900 }
3901
3902 htab_delete (ar_hash);
3903 return 0;
3904 }
3905
3906 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3907 {
3908 {
3909 RTL_PASS,
3910 "eh_ranges", /* name */
3911 NULL, /* gate */
3912 convert_to_eh_region_ranges, /* execute */
3913 NULL, /* sub */
3914 NULL, /* next */
3915 0, /* static_pass_number */
3916 TV_NONE, /* tv_id */
3917 0, /* properties_required */
3918 0, /* properties_provided */
3919 0, /* properties_destroyed */
3920 0, /* todo_flags_start */
3921 TODO_dump_func, /* todo_flags_finish */
3922 }
3923 };
3924
3925 \f
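/* Append VALUE to *DATA_AREA as an unsigned LEB128 number: seven data
   bits per byte, least significant group first, with the high bit set
   on every byte but the last.  For example, 624485 (0x98765) encodes
   as 0xE5 0x8E 0x26.  */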
3926 static void
3927 push_uleb128 (varray_type *data_area, unsigned int value)
3928 {
3929 do
3930 {
3931 unsigned char byte = value & 0x7f;
3932 value >>= 7;
3933 if (value)
3934 byte |= 0x80;
3935 VARRAY_PUSH_UCHAR (*data_area, byte);
3936 }
3937 while (value);
3938 }
3939
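/* Append VALUE to *DATA_AREA as a signed LEB128 number.  Emission
   stops once the remaining bits are all copies of the sign bit and
   the sign is already carried in bit 6 of the last byte emitted; note
   that this relies on the host compiler shifting negative values
   arithmetically.  For example, -624485 encodes as 0x9B 0xF1 0x59.  */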
3940 static void
3941 push_sleb128 (varray_type *data_area, int value)
3942 {
3943 unsigned char byte;
3944 int more;
3945
3946 do
3947 {
3948 byte = value & 0x7f;
3949 value >>= 7;
3950 more = ! ((value == 0 && (byte & 0x40) == 0)
3951 || (value == -1 && (byte & 0x40) != 0));
3952 if (more)
3953 byte |= 0x80;
3954 VARRAY_PUSH_UCHAR (*data_area, byte);
3955 }
3956 while (more);
3957 }
3958
3959 \f
3960 #ifndef HAVE_AS_LEB128
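/* Without assembler support for LEB128 directives, the size of the
   call-site table must be computed in advance so that the @TType base
   offset in the LSDA header can be emitted with the correct value.
   A dw2 entry uses three 4-byte fields plus a uleb128 action; an sjlj
   entry is just two uleb128 values.  */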
3961 static int
3962 dw2_size_of_call_site_table (void)
3963 {
3964 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3965 int size = n * (4 + 4 + 4);
3966 int i;
3967
3968 for (i = 0; i < n; ++i)
3969 {
3970 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3971 size += size_of_uleb128 (cs->action);
3972 }
3973
3974 return size;
3975 }
3976
3977 static int
3978 sjlj_size_of_call_site_table (void)
3979 {
3980 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3981 int size = 0;
3982 int i;
3983
3984 for (i = 0; i < n; ++i)
3985 {
3986 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3987 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3988 size += size_of_uleb128 (cs->action);
3989 }
3990
3991 return size;
3992 }
3993 #endif
3994
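/* Output the call-site table in the DWARF2 format: for each call
   site, the region start and length relative to the start of the
   function, the landing pad offset (zero if there is none), and the
   action record reference.  */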
3995 static void
3996 dw2_output_call_site_table (void)
3997 {
3998 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3999 int i;
4000
4001 for (i = 0; i < n; ++i)
4002 {
4003 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
4004 char reg_start_lab[32];
4005 char reg_end_lab[32];
4006 char landing_pad_lab[32];
4007
4008 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4009 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4010
4011 if (cs->landing_pad)
4012 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4013 CODE_LABEL_NUMBER (cs->landing_pad));
4014
4015 /* ??? Perhaps use insn length scaling if the assembler supports
4016 generic arithmetic. */
4017 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4018 data4 if the function is small enough. */
4019 #ifdef HAVE_AS_LEB128
4020 dw2_asm_output_delta_uleb128 (reg_start_lab,
4021 current_function_func_begin_label,
4022 "region %d start", i);
4023 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4024 "length");
4025 if (cs->landing_pad)
4026 dw2_asm_output_delta_uleb128 (landing_pad_lab,
4027 current_function_func_begin_label,
4028 "landing pad");
4029 else
4030 dw2_asm_output_data_uleb128 (0, "landing pad");
4031 #else
4032 dw2_asm_output_delta (4, reg_start_lab,
4033 current_function_func_begin_label,
4034 "region %d start", i);
4035 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4036 if (cs->landing_pad)
4037 dw2_asm_output_delta (4, landing_pad_lab,
4038 current_function_func_begin_label,
4039 "landing pad");
4040 else
4041 dw2_asm_output_data (4, 0, "landing pad");
4042 #endif
4043 dw2_asm_output_data_uleb128 (cs->action, "action");
4044 }
4045
4046 call_site_base += n;
4047 }
4048
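/* Likewise for setjmp/longjmp exceptions, where an entry is just a
   region dispatch value and an action; no code ranges are needed.  */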
4049 static void
4050 sjlj_output_call_site_table (void)
4051 {
4052 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4053 int i;
4054
4055 for (i = 0; i < n; ++i)
4056 {
4057 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
4058
4059 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4060 "region %d landing pad", i);
4061 dw2_asm_output_data_uleb128 (cs->action, "action");
4062 }
4063
4064 call_site_base += n;
4065 }
4066
4067 #ifndef TARGET_UNWIND_INFO
4068 /* Switch to the section that should be used for exception tables. */
4069
4070 static void
4071 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4072 {
4073 section *s;
4074
4075 if (exception_section)
4076 s = exception_section;
4077 else
4078 {
4079 /* Compute the section and cache it into exception_section,
4080 unless it depends on the function name. */
4081 if (targetm.have_named_sections)
4082 {
4083 int flags;
4084
4085 if (EH_TABLES_CAN_BE_READ_ONLY)
4086 {
4087 int tt_format =
4088 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4089 flags = ((! flag_pic
4090 || ((tt_format & 0x70) != DW_EH_PE_absptr
4091 && (tt_format & 0x70) != DW_EH_PE_aligned))
4092 ? 0 : SECTION_WRITE);
4093 }
4094 else
4095 flags = SECTION_WRITE;
4096
4097 #ifdef HAVE_LD_EH_GC_SECTIONS
4098 if (flag_function_sections)
4099 {
4100 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4101 sprintf (section_name, ".gcc_except_table.%s", fnname);
4102 s = get_section (section_name, flags, NULL);
4103 free (section_name);
4104 }
4105 else
4106 #endif
4107 exception_section
4108 = s = get_section (".gcc_except_table", flags, NULL);
4109 }
4110 else
4111 exception_section
4112 = s = flag_pic ? data_section : readonly_data_section;
4113 }
4114
4115 switch_to_section (s);
4116 }
4117 #endif
4118
4119
4120 /* Output a reference from an exception table to the type_info object TYPE.
4121 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4122 the value. */
4123
4124 static void
4125 output_ttype (tree type, int tt_format, int tt_format_size)
4126 {
4127 rtx value;
4128 bool is_public = true;
4129
4130 if (type == NULL_TREE)
4131 value = const0_rtx;
4132 else
4133 {
4134 struct varpool_node *node;
4135
4136 type = lookup_type_for_runtime (type);
4137 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4138
4139 /* Let cgraph know that the rtti decl is used. Not all of the
4140 paths below go through assemble_integer, which would take
4141 care of this for us. */
4142 STRIP_NOPS (type);
4143 if (TREE_CODE (type) == ADDR_EXPR)
4144 {
4145 type = TREE_OPERAND (type, 0);
4146 if (TREE_CODE (type) == VAR_DECL)
4147 {
4148 node = varpool_node (type);
4149 if (node)
4150 varpool_mark_needed_node (node);
4151 is_public = TREE_PUBLIC (type);
4152 }
4153 }
4154 else
4155 gcc_assert (TREE_CODE (type) == INTEGER_CST);
4156 }
4157
4158 /* Allow the target to override the type table entry format. */
4159 if (targetm.asm_out.ttype (value))
4160 return;
4161
4162 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4163 assemble_integer (value, tt_format_size,
4164 tt_format_size * BITS_PER_UNIT, 1);
4165 else
4166 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
4167 }
4168
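/* Output the exception handling table (LSDA) for the current
   function.  FNNAME is used, on some targets, only to name a
   per-function exception table section.  */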
4169 void
4170 output_function_exception_table (const char * ARG_UNUSED (fnname))
4171 {
4172 int tt_format, cs_format, lp_format, i, n;
4173 #ifdef HAVE_AS_LEB128
4174 char ttype_label[32];
4175 char cs_after_size_label[32];
4176 char cs_end_label[32];
4177 #else
4178 int call_site_len;
4179 #endif
4180 int have_tt_data;
4181 int tt_format_size = 0;
4182
4183 /* Not all functions need anything. */
4184 if (! crtl->uses_eh_lsda)
4185 return;
4186
4187 if (eh_personality_libfunc)
4188 assemble_external_libcall (eh_personality_libfunc);
4189
4190 #ifdef TARGET_UNWIND_INFO
4191 /* TODO: Move this into target file. */
4192 fputs ("\t.personality\t", asm_out_file);
4193 output_addr_const (asm_out_file, eh_personality_libfunc);
4194 fputs ("\n\t.handlerdata\n", asm_out_file);
4195 /* Note that varasm still thinks we're in the function's code section.
4196 The ".endp" directive that will immediately follow will take us back. */
4197 #else
4198 switch_to_exception_section (fnname);
4199 #endif
4200
4201 /* If the target wants a label to begin the table, emit it here. */
4202 targetm.asm_out.except_table_label (asm_out_file);
4203
4204 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4205 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4206
4207 /* Indicate the format of the @TType entries. */
4208 if (! have_tt_data)
4209 tt_format = DW_EH_PE_omit;
4210 else
4211 {
4212 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4213 #ifdef HAVE_AS_LEB128
4214 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
4215 current_function_funcdef_no);
4216 #endif
4217 tt_format_size = size_of_encoded_value (tt_format);
4218
4219 assemble_align (tt_format_size * BITS_PER_UNIT);
4220 }
4221
4222 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
4223 current_function_funcdef_no);
4224
4225 /* The LSDA header. */
4226
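/* A sketch of the layout emitted below; exact formats and sizes vary
   with the target and with assembler LEB128 support:

	1 byte		@LPStart format (DW_EH_PE_omit here)
	1 byte		@TType format
	uleb128		@TType base offset (only if @TType data exists)
	1 byte		call-site entry format
	uleb128		call-site table length
			call-site table
			action record table
			type (@TType) table, suitably aligned
			exception specification table  */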
4227 /* Indicate the format of the landing pad start pointer. An omitted
4228 field implies @LPStart == @Start. */
4229 /* Currently we always put @LPStart == @Start. This field would
4230 be most useful in moving the landing pads completely out of
4231 line to another section, but it could also be used to minimize
4232 the size of uleb128 landing pad offsets. */
4233 lp_format = DW_EH_PE_omit;
4234 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4235 eh_data_format_name (lp_format));
4236
4237 /* @LPStart pointer would go here. */
4238
4239 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4240 eh_data_format_name (tt_format));
4241
4242 #ifndef HAVE_AS_LEB128
4243 if (USING_SJLJ_EXCEPTIONS)
4244 call_site_len = sjlj_size_of_call_site_table ();
4245 else
4246 call_site_len = dw2_size_of_call_site_table ();
4247 #endif
4248
4249 /* A pc-relative 4-byte displacement to the @TType data. */
4250 if (have_tt_data)
4251 {
4252 #ifdef HAVE_AS_LEB128
4253 char ttype_after_disp_label[32];
4254 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4255 current_function_funcdef_no);
4256 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4257 "@TType base offset");
4258 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4259 #else
4260       /* Ug.  Alignment complicates things: the uleb128 size of the
	  displacement determines the padding inserted before the type
	  table, which in turn feeds back into the displacement.
	  Iterate until the value converges.  */
4261 unsigned int before_disp, after_disp, last_disp, disp;
4262
4263 before_disp = 1 + 1;
4264 after_disp = (1 + size_of_uleb128 (call_site_len)
4265 + call_site_len
4266 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4267 + (VEC_length (tree, crtl->eh.ttype_data)
4268 * tt_format_size));
4269
4270 disp = after_disp;
4271 do
4272 {
4273 unsigned int disp_size, pad;
4274
4275 last_disp = disp;
4276 disp_size = size_of_uleb128 (disp);
4277 pad = before_disp + disp_size + after_disp;
4278 if (pad % tt_format_size)
4279 pad = tt_format_size - (pad % tt_format_size);
4280 else
4281 pad = 0;
4282 disp = after_disp + pad;
4283 }
4284 while (disp != last_disp);
4285
4286 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4287 #endif
4288 }
4289
4290 /* Indicate the format of the call-site offsets. */
4291 #ifdef HAVE_AS_LEB128
4292 cs_format = DW_EH_PE_uleb128;
4293 #else
4294 cs_format = DW_EH_PE_udata4;
4295 #endif
4296 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4297 eh_data_format_name (cs_format));
4298
4299 #ifdef HAVE_AS_LEB128
4300 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4301 current_function_funcdef_no);
4302 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4303 current_function_funcdef_no);
4304 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4305 "Call-site table length");
4306 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4307 if (USING_SJLJ_EXCEPTIONS)
4308 sjlj_output_call_site_table ();
4309 else
4310 dw2_output_call_site_table ();
4311 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4312 #else
4313   dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
4314 if (USING_SJLJ_EXCEPTIONS)
4315 sjlj_output_call_site_table ();
4316 else
4317 dw2_output_call_site_table ();
4318 #endif
4319
4320 /* ??? Decode and interpret the data for flag_debug_asm. */
4321 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4322 for (i = 0; i < n; ++i)
4323 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4324 (i ? NULL : "Action record table"));
4325
4326 if (have_tt_data)
4327 assemble_align (tt_format_size * BITS_PER_UNIT);
4328
4329 i = VEC_length (tree, crtl->eh.ttype_data);
4330 while (i-- > 0)
4331 {
4332 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4333 output_ttype (type, tt_format, tt_format_size);
4334 }
4335
4336 #ifdef HAVE_AS_LEB128
4337 if (have_tt_data)
4338 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4339 #endif
4340
4341 /* ??? Decode and interpret the data for flag_debug_asm. */
4342 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4343 for (i = 0; i < n; ++i)
4344 {
4345 if (targetm.arm_eabi_unwinder)
4346 {
4347 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4348 output_ttype (type, tt_format, tt_format_size);
4349 }
4350 else
4351 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4352 (i ? NULL : "Exception specification table"));
4353 }
4354
4355 switch_to_section (current_function_section ());
4356 }
4357
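/* Set the throw statement table of function FUN to TABLE.  */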
4358 void
4359 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4360 {
4361 fun->eh->throw_stmt_table = table;
4362 }
4363
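/* Return the throw statement table of function FUN.  */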
4364 htab_t
4365 get_eh_throw_stmt_table (struct function *fun)
4366 {
4367 return fun->eh->throw_stmt_table;
4368 }
4369
4370 /* Dump EH information to OUT. */
4371
4372 void
4373 dump_eh_tree (FILE * out, struct function *fun)
4374 {
4375 struct eh_region *i;
4376 int depth = 0;
4377 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4378 "allowed_exceptions", "must_not_throw",
4379 "throw"
4380 };
4381
4382 i = fun->eh->region_tree;
4383 if (!i)
4384 return;
4385
4386   fprintf (out, "EH tree:\n");
4387 while (1)
4388 {
4389 fprintf (out, " %*s %i %s", depth * 2, "",
4390 i->region_number, type_name[(int) i->type]);
4391 if (i->tree_label)
4392 {
4393 fprintf (out, " tree_label:");
4394 print_generic_expr (out, i->tree_label, 0);
4395 }
4396 if (i->label)
4397 fprintf (out, " label:%i", INSN_UID (i->label));
4398 if (i->landing_pad)
4399 {
4400 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
4401 if (GET_CODE (i->landing_pad) == NOTE)
4402 fprintf (out, " (deleted)");
4403 }
4404 if (i->post_landing_pad)
4405 {
4406 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4407 if (GET_CODE (i->post_landing_pad) == NOTE)
4408 fprintf (out, " (deleted)");
4409 }
4410 if (i->resume)
4411 {
4412 fprintf (out, " resume:%i", INSN_UID (i->resume));
4413 if (GET_CODE (i->resume) == NOTE)
4414 fprintf (out, " (deleted)");
4415 }
4416 if (i->may_contain_throw)
4417 fprintf (out, " may_contain_throw");
4418 switch (i->type)
4419 {
4420 case ERT_CLEANUP:
4421 if (i->u.cleanup.prev_try)
4422 fprintf (out, " prev try:%i",
4423 i->u.cleanup.prev_try->region_number);
4424 break;
4425
4426 case ERT_TRY:
4427 {
4428 struct eh_region *c;
4429 fprintf (out, " catch regions:");
4430 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4431 fprintf (out, " %i", c->region_number);
4432 }
4433 break;
4434
4435 case ERT_CATCH:
4436 if (i->u.eh_catch.prev_catch)
4437 fprintf (out, " prev: %i",
4438 i->u.eh_catch.prev_catch->region_number);
4439 if (i->u.eh_catch.next_catch)
4440 	    fprintf (out, " next: %i",
4441 i->u.eh_catch.next_catch->region_number);
4442 fprintf (out, " type:");
4443 print_generic_expr (out, i->u.eh_catch.type_list, 0);
4444 break;
4445
4446 case ERT_ALLOWED_EXCEPTIONS:
4447 	  fprintf (out, " filter:%i types:", i->u.allowed.filter);
4448 print_generic_expr (out, i->u.allowed.type_list, 0);
4449 break;
4450
4451 case ERT_THROW:
4452 fprintf (out, " type:");
4453 print_generic_expr (out, i->u.eh_throw.type, 0);
4454 break;
4455
4456 case ERT_MUST_NOT_THROW:
4457 break;
4458
4459 case ERT_UNKNOWN:
4460 break;
4461 }
4462 if (i->aka)
4463 {
4464 fprintf (out, " also known as:");
4465 dump_bitmap (out, i->aka);
4466 }
4467 else
4468 fprintf (out, "\n");
4469 /* If there are sub-regions, process them. */
4470 if (i->inner)
4471 i = i->inner, depth++;
4472 /* If there are peers, process them. */
4473 else if (i->next_peer)
4474 i = i->next_peer;
4475 /* Otherwise, step back up the tree to the next peer. */
4476 else
4477 {
4478 do
4479 {
4480 i = i->outer;
4481 depth--;
4482 if (i == NULL)
4483 return;
4484 }
4485 while (i->next_peer == NULL);
4486 i = i->next_peer;
4487 }
4488 }
4489 }
4490
4491 /* Dump the EH tree for FN on stderr. */
4492
4493 void
4494 debug_eh_tree (struct function *fn)
4495 {
4496 dump_eh_tree (stderr, fn);
4497 }
4498
4499
4500 /* Verify EH region invariants. */
4501
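/* Helper for verify_eh_tree: check REGION and its subtree.  PREV_TRY
   is the innermost enclosing try region, or NULL when none is
   reachable past a must-not-throw or empty exception-specification
   region.  Return true if a problem was found.  */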
4502 static bool
4503 verify_eh_region (struct eh_region *region, struct eh_region *prev_try)
4504 {
4505 bool found = false;
4506 if (!region)
4507 return false;
4508 switch (region->type)
4509 {
4510 case ERT_CLEANUP:
4511 if (region->u.cleanup.prev_try != prev_try)
4512 {
4513 error ("Wrong prev_try pointer in EH region %i",
4514 region->region_number);
4515 found = true;
4516 }
4517 break;
4518 case ERT_TRY:
4519 {
4520 struct eh_region *c, *prev = NULL;
4521 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4522 {
4523 	    error ("Try region %i has wrong eh_catch pointer to %i",
4524 region->region_number,
4525 region->u.eh_try.eh_catch->region_number);
4526 found = true;
4527 }
4528 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4529 {
4530 if (c->outer != region->outer)
4531 {
4532 error
4533 ("Catch region %i has different outer region than try region %i",
4534 c->region_number, region->region_number);
4535 found = true;
4536 }
4537 if (c->u.eh_catch.prev_catch != prev)
4538 {
4539 error ("Catch region %i has corrupted catchlist",
4540 c->region_number);
4541 found = true;
4542 }
4543 prev = c;
4544 }
4545 if (prev != region->u.eh_try.last_catch)
4546 {
4547 error
4548 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4549 region->region_number,
4550 region->u.eh_try.last_catch->region_number,
4551 prev->region_number);
4552 found = true;
4553 }
4554 }
4555 break;
4556 case ERT_CATCH:
4557 if (!region->u.eh_catch.prev_catch
4558 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4559 {
4560 	  error ("Catch region %i should be followed by a try region",
	 region->region_number);
4561 found = true;
4562 }
4563 break;
4564 case ERT_ALLOWED_EXCEPTIONS:
4565 case ERT_MUST_NOT_THROW:
4566 case ERT_THROW:
4567 break;
4568 case ERT_UNKNOWN:
4569 gcc_unreachable ();
4570 }
4571 if (region->type == ERT_TRY)
4572 prev_try = region;
4573 else if (region->type == ERT_MUST_NOT_THROW
4574 || (region->type == ERT_ALLOWED_EXCEPTIONS
4575 && !region->u.allowed.type_list))
4576 prev_try = NULL;
4577 for (region = region->inner; region; region = region->next_peer)
4578 found |= verify_eh_region (region, prev_try);
4579 return found;
4580 }
4581
4582 /* Verify invariants on EH datastructures. */
4583
4584 void
4585 verify_eh_tree (struct function *fun)
4586 {
4587 struct eh_region *i, *outer = NULL;
4588 bool err = false;
4589 int nvisited = 0;
4590 int count = 0;
4591 int j;
4592 int depth = 0;
4593
4594 if (!fun->eh->region_tree)
4595 return;
4596 for (j = fun->eh->last_region_number; j > 0; --j)
4597 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4598 {
4599 if (i->region_number == j)
4600 count++;
4601 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4602 {
4603 error ("region_array is corrupted for region %i",
4604 i->region_number);
4605 err = true;
4606 }
4607 }
4608 i = fun->eh->region_tree;
4609
4610 while (1)
4611 {
4612 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4613 {
4614 error ("region_array is corrupted for region %i", i->region_number);
4615 err = true;
4616 }
4617 if (i->outer != outer)
4618 {
4619 error ("outer block of region %i is wrong", i->region_number);
4620 err = true;
4621 }
4622 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4623 {
4624 	  error
4625 	    ("region %i may contain throw and is contained in a region that may not",
4626 	     i->region_number);
4627 err = true;
4628 }
4629 if (depth < 0)
4630 {
4631 error ("negative nesting depth of region %i", i->region_number);
4632 err = true;
4633 }
4634 nvisited++;
4635 /* If there are sub-regions, process them. */
4636 if (i->inner)
4637 outer = i, i = i->inner, depth++;
4638 /* If there are peers, process them. */
4639 else if (i->next_peer)
4640 i = i->next_peer;
4641 /* Otherwise, step back up the tree to the next peer. */
4642 else
4643 {
4644 do
4645 {
4646 i = i->outer;
4647 depth--;
4648 if (i == NULL)
4649 {
4650 if (depth != -1)
4651 {
4652 		      error ("region tree ends at depth %i", depth + 1);
4653 err = true;
4654 }
4655 if (count != nvisited)
4656 {
4657 error ("array does not match the region tree");
4658 err = true;
4659 }
4660 if (!err)
4661 for (i = fun->eh->region_tree; i; i = i->next_peer)
4662 err |= verify_eh_region (i, NULL);
4663
4664 if (err)
4665 {
4666 dump_eh_tree (stderr, fun);
4667 internal_error ("verify_eh_tree failed");
4668 }
4669 return;
4670 }
4671 outer = i->outer;
4672 }
4673 while (i->next_peer == NULL);
4674 i = i->next_peer;
4675 }
4676 }
4677 }
4678
4679 /* Initialize unwind_resume_libfunc. */
4680
4681 void
4682 default_init_unwind_resume_libfunc (void)
4683 {
4684   /* The default C++ routines aren't actually C++ specific, so use those.  */
4685   unwind_resume_libfunc =
4686     init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4687 		      : "_Unwind_Resume");
4688 }
4689
4690 \f
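/* Gate: run the RTL exception handling pass only when exception
   handling is actually being used.  */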
4691 static bool
4692 gate_handle_eh (void)
4693 {
4694 return doing_eh (0);
4695 }
4696
4697 /* Complete generation of exception handling code. */
4698 static unsigned int
4699 rest_of_handle_eh (void)
4700 {
4701 finish_eh_generation ();
4702 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4703 return 0;
4704 }
4705
4706 struct rtl_opt_pass pass_rtl_eh =
4707 {
4708 {
4709 RTL_PASS,
4710 "eh", /* name */
4711 gate_handle_eh, /* gate */
4712 rest_of_handle_eh, /* execute */
4713 NULL, /* sub */
4714 NULL, /* next */
4715 0, /* static_pass_number */
4716 TV_JUMP, /* tv_id */
4717 0, /* properties_required */
4718 0, /* properties_provided */
4719 0, /* properties_destroyed */
4720 0, /* todo_flags_start */
4721 TODO_dump_func /* todo_flags_finish */
4722 }
4723 };
4724
4725 #include "gt-except.h"