re PR middle-end/40388 (another null pointer in remove_unreachable_regions)
[gcc.git] / gcc / except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 [ Add updated documentation on how to use this. ] */
48
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80 #include "tree-flow.h"
81
82 /* Provide defaults for stuff that may not be defined when using
83 sjlj exceptions. */
84 #ifndef EH_RETURN_DATA_REGNO
85 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 #endif
87
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 gimple (*lang_protect_cleanup_actions) (void);
91
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
94
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
97
98 /* A hash table of label to region number. */
99
100 struct GTY(()) ehl_map_entry {
101 rtx label;
102 struct eh_region_d *region;
103 };
104
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
108
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
116 \f
117
118 struct GTY(()) call_site_record_d
119 {
120 rtx landing_pad;
121 int action;
122 };
123 \f
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
126
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
137
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
146
147 static void remove_eh_handler (struct eh_region_d *);
148 static void remove_eh_handler_and_replace (struct eh_region_d *,
149 struct eh_region_d *, bool);
150
151 /* The return value of reachable_next_level. */
152 enum reachable_code
153 {
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
162 };
163
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
166 struct reachable_info *, bool);
167
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region_d *);
172 static int add_call_site (rtx, int);
173
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
182
183 \f
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
187
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
190
191 int
192 doing_eh (int do_warn)
193 {
194 if (! flag_exceptions)
195 {
196 static int warned = 0;
197 if (! warned && do_warn)
198 {
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
201 }
202 return 0;
203 }
204 return 1;
205 }
206
207 \f
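/* One-time initialization for exception handling: create the
   type-to-runtime map and, when using setjmp/longjmp exceptions,
   lay out the SjLj_Function_Context record used by the runtime. */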
208 void
209 init_eh (void)
210 {
211 if (! flag_exceptions)
212 return;
213
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
215
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
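      /* As an illustrative sketch (not the authoritative definition,
	 which lives in unwind-sjlj.c), the record built below
	 corresponds roughly to the following C declaration, where the
	 __data and __jbuf sizes are target-dependent:

	   struct SjLj_Function_Context
	   {
	     struct SjLj_Function_Context *__prev;
	     int __call_site;
	     _Unwind_Word __data[4];
	     void *__personality;
	     void *__lsda;
	     void *__jbuf[];   (length chosen below)
	   };  */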
218 if (USING_SJLJ_EXCEPTIONS)
219 {
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
221
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
223
224 f_prev = build_decl (BUILTINS_LOCATION,
225 FIELD_DECL, get_identifier ("__prev"),
226 build_pointer_type (sjlj_fc_type_node));
227 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
228
229 f_cs = build_decl (BUILTINS_LOCATION,
230 FIELD_DECL, get_identifier ("__call_site"),
231 integer_type_node);
232 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
233
234 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
235 tmp = build_array_type (lang_hooks.types.type_for_mode
236 (targetm.unwind_word_mode (), 1),
237 tmp);
238 f_data = build_decl (BUILTINS_LOCATION,
239 FIELD_DECL, get_identifier ("__data"), tmp);
240 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
241
242 f_per = build_decl (BUILTINS_LOCATION,
243 FIELD_DECL, get_identifier ("__personality"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
246
247 f_lsda = build_decl (BUILTINS_LOCATION,
248 FIELD_DECL, get_identifier ("__lsda"),
249 ptr_type_node);
250 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
251
252 #ifdef DONT_USE_BUILTIN_SETJMP
253 #ifdef JMP_BUF_SIZE
254 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
255 #else
256 /* This should be large enough for most systems; if it is not,
257 JMP_BUF_SIZE should be defined with the proper value. It will
258 also tend to be larger than necessary for most systems; a more
259 optimal port will define JMP_BUF_SIZE. */
260 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
261 #endif
262 #else
263 /* builtin_setjmp takes a pointer to 5 words. */
264 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
265 #endif
266 tmp = build_index_type (tmp);
267 tmp = build_array_type (ptr_type_node, tmp);
268 f_jbuf = build_decl (BUILTINS_LOCATION,
269 FIELD_DECL, get_identifier ("__jbuf"), tmp);
270 #ifdef DONT_USE_BUILTIN_SETJMP
271 /* We don't know what the alignment requirements of the
272 runtime's jmp_buf are. Overestimate. */
273 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
274 DECL_USER_ALIGN (f_jbuf) = 1;
275 #endif
276 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
277
278 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
279 TREE_CHAIN (f_prev) = f_cs;
280 TREE_CHAIN (f_cs) = f_data;
281 TREE_CHAIN (f_data) = f_per;
282 TREE_CHAIN (f_per) = f_lsda;
283 TREE_CHAIN (f_lsda) = f_jbuf;
284
285 layout_type (sjlj_fc_type_node);
286
287 /* Cache the interesting field offsets so that we have
288 easy access from rtl. */
289 sjlj_fc_call_site_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
292 sjlj_fc_data_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
295 sjlj_fc_personality_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
298 sjlj_fc_lsda_ofs
299 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
300 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
301 sjlj_fc_jbuf_ofs
302 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
303 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
304 }
305 }
306
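/* Allocate the per-function EH state. */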
307 void
308 init_eh_for_function (void)
309 {
310 cfun->eh = GGC_CNEW (struct eh_status);
311 }
312 \f
313 /* Routines to generate the exception tree somewhat directly.
314 These are used from tree-eh.c when processing exception related
315 nodes during tree optimization. */
316
317 static struct eh_region_d *
318 gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
319 {
320 struct eh_region_d *new_eh;
321
322 #ifdef ENABLE_CHECKING
323 gcc_assert (doing_eh (0));
324 #endif
325
326 /* Insert a new blank region as a leaf in the tree. */
327 new_eh = GGC_CNEW (struct eh_region_d);
328 new_eh->type = type;
329 new_eh->outer = outer;
330 if (outer)
331 {
332 new_eh->next_peer = outer->inner;
333 outer->inner = new_eh;
334 }
335 else
336 {
337 new_eh->next_peer = cfun->eh->region_tree;
338 cfun->eh->region_tree = new_eh;
339 }
340
341 new_eh->region_number = ++cfun->eh->last_region_number;
342
343 return new_eh;
344 }
345
346 struct eh_region_d *
347 gen_eh_region_cleanup (struct eh_region_d *outer)
348 {
349 struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
350 return cleanup;
351 }
352
353 struct eh_region_d *
354 gen_eh_region_try (struct eh_region_d *outer)
355 {
356 return gen_eh_region (ERT_TRY, outer);
357 }
358
359 struct eh_region_d *
360 gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
361 {
362 struct eh_region_d *c, *l;
363 tree type_list, type_node;
364
365 /* Make sure we always end up with a type list, to normalize further
366 processing; then register each type against the runtime types map. */
367 type_list = type_or_list;
368 if (type_or_list)
369 {
370 if (TREE_CODE (type_or_list) != TREE_LIST)
371 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
372
373 type_node = type_list;
374 for (; type_node; type_node = TREE_CHAIN (type_node))
375 add_type_for_runtime (TREE_VALUE (type_node));
376 }
377
378 c = gen_eh_region (ERT_CATCH, t->outer);
379 c->u.eh_catch.type_list = type_list;
380 l = t->u.eh_try.last_catch;
381 c->u.eh_catch.prev_catch = l;
382 if (l)
383 l->u.eh_catch.next_catch = c;
384 else
385 t->u.eh_try.eh_catch = c;
386 t->u.eh_try.last_catch = c;
387
388 return c;
389 }
390
391 struct eh_region_d *
392 gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
393 {
394 struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
395 region->u.allowed.type_list = allowed;
396
397 for (; allowed ; allowed = TREE_CHAIN (allowed))
398 add_type_for_runtime (TREE_VALUE (allowed));
399
400 return region;
401 }
402
403 struct eh_region_d *
404 gen_eh_region_must_not_throw (struct eh_region_d *outer)
405 {
406 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
407 }
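/* As a hypothetical illustration of how the constructors above
   compose: lowering a C++ fragment such as

     try { ... } catch (T) { ... }

   would build roughly

     struct eh_region_d *t = gen_eh_region_try (outer);
     struct eh_region_d *c = gen_eh_region_catch (t, T_type);

   where gen_eh_region_catch links C into T's catch list and registers
   T_type with the runtime type map. The names OUTER and T_type are
   placeholders, not identifiers used elsewhere in this file. */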
408
409 int
410 get_eh_region_number (struct eh_region_d *region)
411 {
412 return region->region_number;
413 }
414
415 bool
416 get_eh_region_may_contain_throw (struct eh_region_d *region)
417 {
418 return region->may_contain_throw;
419 }
420
421 tree
422 get_eh_region_tree_label (struct eh_region_d *region)
423 {
424 return region->tree_label;
425 }
426
427 tree
428 get_eh_region_no_tree_label (int region)
429 {
430 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
431 }
432
433 void
434 set_eh_region_tree_label (struct eh_region_d *region, tree lab)
435 {
436 region->tree_label = lab;
437 }
438 \f
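/* Expand a tree-level RESX: emit an rtl RESX jump insn for the region
   number encoded in EXP's operand and record it as the region's
   resume point. */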
439 void
440 expand_resx_expr (tree exp)
441 {
442 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
443 struct eh_region_d *reg = VEC_index (eh_region,
444 cfun->eh->region_array, region_nr);
445
446 gcc_assert (!reg->resume);
447 do_pending_stack_adjust ();
448 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
449 emit_barrier ();
450 }
451
452 /* Note that the current EH region (if any) may contain a throw, or a
453 call to a function which itself may contain a throw. */
454
455 void
456 note_eh_region_may_contain_throw (struct eh_region_d *region)
457 {
458 while (region && !region->may_contain_throw)
459 {
460 region->may_contain_throw = 1;
461 region = region->outer;
462 }
463 }
464
465
466 /* Return an rtl expression for a pointer to the exception object
467 within a handler. */
468
469 rtx
470 get_exception_pointer (void)
471 {
472 if (! crtl->eh.exc_ptr)
473 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
474 return crtl->eh.exc_ptr;
475 }
476
477 /* Return an rtl expression for the exception dispatch filter
478 within a handler. */
479
480 rtx
481 get_exception_filter (void)
482 {
483 if (! crtl->eh.filter)
484 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
485 return crtl->eh.filter;
486 }
487 \f
488 /* This section is for the exception handling specific optimization pass. */
489
490 /* Build an array allowing random access into the exception region tree. */
491
492 void
493 collect_eh_region_array (void)
494 {
495 struct eh_region_d *i;
496
497 i = cfun->eh->region_tree;
498 if (! i)
499 return;
500
501 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
502 cfun->eh->last_region_number + 1);
503 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
504
505 while (1)
506 {
507 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
508
509 /* If there are sub-regions, process them. */
510 if (i->inner)
511 i = i->inner;
512 /* If there are peers, process them. */
513 else if (i->next_peer)
514 i = i->next_peer;
515 /* Otherwise, step back up the tree to the next peer. */
516 else
517 {
518 do {
519 i = i->outer;
520 if (i == NULL)
521 return;
522 } while (i->next_peer == NULL);
523 i = i->next_peer;
524 }
525 }
526 }
527
528 /* R is a MUST_NOT_THROW region that is not reachable via local
529 RESX instructions. It still must be kept in the tree in case the
530 runtime can unwind through it; otherwise we would eliminate the
531 terminate call the runtime would perform. Return TRUE if R contains
532 throwing statements or some exception in an inner region can be
533 unwound up to R.
534
535 CONTAINS_STMT is a bitmap of all regions that contain some throwing
536 statement.
537
538 The function looks O(n^3) at first sight. In fact it is called at
539 most once for every MUST_NOT_THROW in the EH tree from
540 remove_unreachable_regions. Because the outer loop walking subregions
541 does not dive into MUST_NOT_THROW, it examines every region at most
542 once. The inner loop unwinds from the throwing statement the same
543 way as we do during CFG construction, so this is O(n^2) in the size
544 of the EH tree but O(n) in the size of the CFG. In practice EH trees are wide, not deep, so this is not a problem. */
545
546 static bool
547 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
548 {
549 struct eh_region_d *i = r->inner;
550 unsigned n;
551 bitmap_iterator bi;
552
553 if (TEST_BIT (contains_stmt, r->region_number))
554 return true;
555 if (r->aka)
556 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
557 if (TEST_BIT (contains_stmt, n))
558 return true;
559 if (!i)
560 return false;
561 while (1)
562 {
563 /* It is pointless to look into MUST_NOT_THROW
564 or dive into subregions. They never unwind up. */
565 if (i->type != ERT_MUST_NOT_THROW)
566 {
567 bool found = TEST_BIT (contains_stmt, i->region_number);
568 if (!found && i->aka)
569 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
570 if (TEST_BIT (contains_stmt, n))
571 {
572 found = true;
573 break;
574 }
575 /* We have a nested region that contains a throwing statement.
576 See if resuming might lead up to the resx, or whether we get
577 locally caught sooner. If we get locally caught sooner, we
578 either know region R is not reachable, or it would have a
579 direct edge from the EH resx and thus we would have considered
580 the region reachable in the first place. */
581 if (found)
582 {
583 struct eh_region_d *i1 = i;
584 tree type_thrown = NULL_TREE;
585
586 if (i1->type == ERT_THROW)
587 {
588 type_thrown = i1->u.eh_throw.type;
589 i1 = i1->outer;
590 }
591 for (; i1 != r; i1 = i1->outer)
592 if (reachable_next_level (i1, type_thrown, NULL,
593 false) >= RNL_CAUGHT)
594 break;
595 if (i1 == r)
596 return true;
597 }
598 }
599 /* If there are sub-regions, process them. */
600 if (i->type != ERT_MUST_NOT_THROW && i->inner)
601 i = i->inner;
602 /* If there are peers, process them. */
603 else if (i->next_peer)
604 i = i->next_peer;
605 /* Otherwise, step back up the tree to the next peer. */
606 else
607 {
608 do
609 {
610 i = i->outer;
611 if (i == r)
612 return false;
613 }
614 while (i->next_peer == NULL);
615 i = i->next_peer;
616 }
617 }
618 }
619
620 /* Bring region R to the root of tree. */
621
622 static void
623 bring_to_root (struct eh_region_d *r)
624 {
625 struct eh_region_d **pp;
626 struct eh_region_d *outer = r->outer;
627 if (!r->outer)
628 return;
629 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
630 continue;
631 *pp = r->next_peer;
632 r->outer = NULL;
633 r->next_peer = cfun->eh->region_tree;
634 cfun->eh->region_tree = r;
635 }
636
637 /* Return true if region R2 can be replaced by R1. */
638
639 static bool
640 eh_region_replaceable_by_p (const struct eh_region_d *r1,
641 const struct eh_region_d *r2)
642 {
643 /* Regions are semantically the same if they are of the same type
644 and have the same label. */
645 if (r1->type != r2->type)
646 return false;
647 if (r1->tree_label != r2->tree_label)
648 return false;
649
650 /* Verify that the region-type-dependent data is also the same. */
651 switch (r1->type)
652 {
653 case ERT_MUST_NOT_THROW:
654 case ERT_CLEANUP:
655 break;
656 case ERT_TRY:
657 {
658 struct eh_region_d *c1, *c2;
659 for (c1 = r1->u.eh_try.eh_catch,
660 c2 = r2->u.eh_try.eh_catch;
661 c1 && c2;
662 c1 = c1->u.eh_catch.next_catch,
663 c2 = c2->u.eh_catch.next_catch)
664 if (!eh_region_replaceable_by_p (c1, c2))
665 return false;
666 if (c1 || c2)
667 return false;
668 }
669 break;
670 case ERT_CATCH:
671 if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
672 return false;
673 if (!list_equal_p (r1->u.eh_catch.filter_list,
674 r2->u.eh_catch.filter_list))
675 return false;
676 break;
677 case ERT_ALLOWED_EXCEPTIONS:
678 if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
679 return false;
680 if (r1->u.allowed.filter != r2->u.allowed.filter)
681 return false;
682 break;
683 case ERT_THROW:
684 if (r1->u.eh_throw.type != r2->u.eh_throw.type)
685 return false;
686 break;
687 default:
688 gcc_unreachable ();
689 }
690 if (dump_file && (dump_flags & TDF_DETAILS))
691 fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
692 r2->region_number);
693 return true;
694 }
695
696 /* Replace region R2 by R1. */
697
698 static void
699 replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
700 {
701 struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
702 struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
703 bool is_try = r1->type == ERT_TRY;
704
705 gcc_assert (r1->type != ERT_CATCH);
706 remove_eh_handler_and_replace (r2, r1, false);
707 if (is_try)
708 {
709 while (next1)
710 {
711 r1 = next1;
712 r2 = next2;
713 gcc_assert (next1->type == ERT_CATCH);
714 gcc_assert (next2->type == ERT_CATCH);
715 next1 = next1->u.eh_catch.next_catch;
716 next2 = next2->u.eh_catch.next_catch;
717 remove_eh_handler_and_replace (r2, r1, false);
718 }
719 }
720 }
721
722 /* Return hash value of type list T. */
723
724 static hashval_t
725 hash_type_list (tree t)
726 {
727 hashval_t val = 0;
728 for (; t; t = TREE_CHAIN (t))
729 val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
730 return val;
731 }
732
733 /* Hash EH regions so semantically same regions get same hash value. */
734
735 static hashval_t
736 hash_eh_region (const void *r)
737 {
738 const struct eh_region_d *region = (const struct eh_region_d *) r;
739 hashval_t val = region->type;
740
741 if (region->tree_label)
742 val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
743 switch (region->type)
744 {
745 case ERT_MUST_NOT_THROW:
746 case ERT_CLEANUP:
747 break;
748 case ERT_TRY:
749 {
750 struct eh_region_d *c;
751 for (c = region->u.eh_try.eh_catch;
752 c; c = c->u.eh_catch.next_catch)
753 val = iterative_hash_hashval_t (hash_eh_region (c), val);
754 }
755 break;
756 case ERT_CATCH:
757 val = iterative_hash_hashval_t (hash_type_list
758 (region->u.eh_catch.type_list), val);
759 break;
760 case ERT_ALLOWED_EXCEPTIONS:
761 val = iterative_hash_hashval_t
762 (hash_type_list (region->u.allowed.type_list), val);
763 val = iterative_hash_hashval_t (region->u.allowed.filter, val);
764 break;
765 case ERT_THROW:
766 val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
767 break;
768 default:
769 gcc_unreachable ();
770 }
771 return val;
772 }
773
774 /* Return true if regions R1 and R2 are equal. */
775
776 static int
777 eh_regions_equal_p (const void *r1, const void *r2)
778 {
779 return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
780 (const struct eh_region_d *) r2);
781 }
782
783 /* Walk all peers of REGION and try to merge those regions
784 that are semantically equivalent. Look into subregions
785 recursively too. */
786
787 static bool
788 merge_peers (struct eh_region_d *region)
789 {
790 struct eh_region_d *r1, *r2, *outer = NULL, *next;
791 bool merged = false;
792 int num_regions = 0;
793 if (region)
794 outer = region->outer;
795 else
796 return false;
797
798 /* First see if there is inner region equivalent to region
799 in question. EH control flow is acyclic so we know we
800 can merge them. */
801 if (outer)
802 for (r1 = region; r1; r1 = next)
803 {
804 next = r1->next_peer;
805 if (r1->type == ERT_CATCH)
806 continue;
807 if (eh_region_replaceable_by_p (r1->outer, r1))
808 {
809 replace_region (r1->outer, r1);
810 merged = true;
811 }
812 else
813 num_regions ++;
814 }
815
816 /* Get new first region and try to match the peers
817 for equivalence. */
818 if (outer)
819 region = outer->inner;
820 else
821 region = cfun->eh->region_tree;
822
823 /* If there are only a few regions to inspect, an N^2 loop
824 matching each region against every other region
825 will do the job well. */
826 if (num_regions < 10)
827 {
828 for (r1 = region; r1; r1 = r1->next_peer)
829 {
830 if (r1->type == ERT_CATCH)
831 continue;
832 for (r2 = r1->next_peer; r2; r2 = next)
833 {
834 next = r2->next_peer;
835 if (eh_region_replaceable_by_p (r1, r2))
836 {
837 replace_region (r1, r2);
838 merged = true;
839 }
840 }
841 }
842 }
843 /* Or use hashtable to avoid N^2 behaviour. */
844 else
845 {
846 htab_t hash;
847 hash = htab_create (num_regions, hash_eh_region,
848 eh_regions_equal_p, NULL);
849 for (r1 = region; r1; r1 = next)
850 {
851 void **slot;
852
853 next = r1->next_peer;
854 if (r1->type == ERT_CATCH)
855 continue;
856 slot = htab_find_slot (hash, r1, INSERT);
857 if (!*slot)
858 *slot = r1;
859 else
860 replace_region ((struct eh_region_d *) *slot, r1);
861 }
862 htab_delete (hash);
863 }
864 for (r1 = region; r1; r1 = r1->next_peer)
865 merged |= merge_peers (r1->inner);
866 return merged;
867 }
868
869 /* Remove all regions whose labels are not reachable.
870 REACHABLE is a bitmap of all regions that are used by the function;
871 CONTAINS_STMT is a bitmap of all regions that contain a statement (or NULL). */
872
873 void
874 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
875 {
876 int i;
877 struct eh_region_d *r;
878 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
879 struct eh_region_d *local_must_not_throw = NULL;
880 struct eh_region_d *first_must_not_throw = NULL;
881
882 for (i = cfun->eh->last_region_number; i > 0; --i)
883 {
884 r = VEC_index (eh_region, cfun->eh->region_array, i);
885 if (!r || r->region_number != i)
886 continue;
887 if (!TEST_BIT (reachable, i) && !r->resume)
888 {
889 bool kill_it = true;
890
891 r->tree_label = NULL;
892 switch (r->type)
893 {
894 case ERT_THROW:
895 /* Don't remove ERT_THROW regions if their outer region
896 is reachable. */
897 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
898 kill_it = false;
899 break;
900 case ERT_MUST_NOT_THROW:
901 /* MUST_NOT_THROW regions are implementable solely in the
902 runtime, but we need them when inlining a function.
903
904 Keep them if the outer region is not MUST_NOT_THROW as well
905 and if they contain some statement that might unwind through
906 them. */
907 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
908 && (!contains_stmt
909 || can_be_reached_by_runtime (contains_stmt, r)))
910 kill_it = false;
911 break;
912 case ERT_TRY:
913 {
914 /* TRY regions are reachable if any of its CATCH regions
915 are reachable. */
916 struct eh_region_d *c;
917 for (c = r->u.eh_try.eh_catch; c;
918 c = c->u.eh_catch.next_catch)
919 if (TEST_BIT (reachable, c->region_number))
920 {
921 kill_it = false;
922 break;
923 }
924 break;
925 }
926
927 default:
928 break;
929 }
930
931 if (kill_it)
932 {
933 if (dump_file)
934 fprintf (dump_file, "Removing unreachable eh region %i\n",
935 r->region_number);
936 remove_eh_handler (r);
937 }
938 else if (r->type == ERT_MUST_NOT_THROW)
939 {
940 if (!first_must_not_throw)
941 first_must_not_throw = r;
942 VEC_safe_push (eh_region, heap, must_not_throws, r);
943 }
944 }
945 else
946 if (r->type == ERT_MUST_NOT_THROW)
947 {
948 if (!local_must_not_throw)
949 local_must_not_throw = r;
950 if (r->outer)
951 VEC_safe_push (eh_region, heap, must_not_throws, r);
952 }
953 }
954
955 /* MUST_NOT_THROW regions without a local handler are all the same;
956 they trigger a terminate call in the runtime.
957 MUST_NOT_THROW regions handled locally can differ in the debug info
958 associated with the std::terminate () call, or in whether they call
959 terminate or abort when one comes from Java and another from C++.
960
961 We merge all MUST_NOT_THROW regions handled by the runtime into one.
962 We also bring all local MUST_NOT_THROW regions to the roots of the EH
963 tree (since unwinding never continues to the outer region anyway).
964 If a MUST_NOT_THROW with a local handler is present in the tree, we
965 use that region to merge into, since it will remain in the tree
966 anyway; otherwise we use the first MUST_NOT_THROW.
967
968 Merging of locally handled regions needs changes to the CFG.
969 Crossjumping should take care of this, by looking at the actual code
970 and ensuring that the cleanup actions are really the same. */
971
972 if (local_must_not_throw)
973 first_must_not_throw = local_must_not_throw;
974
975 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
976 {
977 if (!r->label && !r->tree_label && r != first_must_not_throw)
978 {
979 if (dump_file)
980 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
981 r->region_number,
982 first_must_not_throw->region_number);
983 remove_eh_handler_and_replace (r, first_must_not_throw, false);
984 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
985 }
986 else
987 bring_to_root (r);
988 }
989 merge_peers (cfun->eh->region_tree);
990 #ifdef ENABLE_CHECKING
991 verify_eh_tree (cfun);
992 #endif
993 VEC_free (eh_region, heap, must_not_throws);
994 }
995
996 /* Return an array mapping each LABEL_DECL_UID to the region whose
997 tree_label carries that label. */
998
999 VEC (int, heap) *
1000 label_to_region_map (void)
1001 {
1002 VEC (int, heap) * label_to_region = NULL;
1003 int i;
1004 int idx;
1005
1006 VEC_safe_grow_cleared (int, heap, label_to_region,
1007 cfun->cfg->last_label_uid + 1);
1008 for (i = cfun->eh->last_region_number; i > 0; --i)
1009 {
1010 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
1011 if (r && r->region_number == i
1012 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
1013 {
1014 if ((idx = VEC_index (int, label_to_region,
1015 LABEL_DECL_UID (r->tree_label))) != 0)
1016 r->next_region_sharing_label =
1017 VEC_index (eh_region, cfun->eh->region_array, idx);
1018 else
1019 r->next_region_sharing_label = NULL;
1020 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
1021 i);
1022 }
1023 }
1024 return label_to_region;
1025 }
1026
1027 /* Return number of EH regions. */
1028 int
1029 num_eh_regions (void)
1030 {
1031 return cfun->eh->last_region_number + 1;
1032 }
1033
1034 /* Return next region sharing same label as REGION. */
1035
1036 int
1037 get_next_region_sharing_label (int region)
1038 {
1039 struct eh_region_d *r;
1040 if (!region)
1041 return 0;
1042 r = VEC_index (eh_region, cfun->eh->region_array, region);
1043 if (!r || !r->next_region_sharing_label)
1044 return 0;
1045 return r->next_region_sharing_label->region_number;
1046 }
1047
1048 /* Return a bitmap of all labels that are handlers of MUST_NOT_THROW regions. */
1049
1050 bitmap
1051 must_not_throw_labels (void)
1052 {
1053 struct eh_region_d *i;
1054 bitmap labels = BITMAP_ALLOC (NULL);
1055
1056 i = cfun->eh->region_tree;
1057 if (! i)
1058 return labels;
1059
1060 while (1)
1061 {
1062 if (i->type == ERT_MUST_NOT_THROW && i->tree_label
1063 && LABEL_DECL_UID (i->tree_label) >= 0)
1064 bitmap_set_bit (labels, LABEL_DECL_UID (i->tree_label));
1065
1066 /* If there are sub-regions, process them. */
1067 if (i->inner)
1068 i = i->inner;
1069 /* If there are peers, process them. */
1070 else if (i->next_peer)
1071 i = i->next_peer;
1072 /* Otherwise, step back up the tree to the next peer. */
1073 else
1074 {
1075 do {
1076 i = i->outer;
1077 if (i == NULL)
1078 return labels;
1079 } while (i->next_peer == NULL);
1080 i = i->next_peer;
1081 }
1082 }
1083 }
1084
1085 /* Set up EH labels for RTL. */
1086
1087 void
1088 convert_from_eh_region_ranges (void)
1089 {
1090 int i, n = cfun->eh->last_region_number;
1091
1092 /* Most of the work is already done at the tree level. All we need to
1093 do is collect the rtl labels that correspond to the tree labels that
1095 we allocated earlier. */
1096 for (i = 1; i <= n; ++i)
1097 {
1098 struct eh_region_d *region;
1099
1100 region = VEC_index (eh_region, cfun->eh->region_array, i);
1101 if (region && region->tree_label)
1102 region->label = DECL_RTL_IF_SET (region->tree_label);
1103 }
1104 }
1105
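/* Walk all regions, looking up the label (or, once landing pads have
   been built, the landing pad) associated with each one. */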
1106 void
1107 find_exception_handler_labels (void)
1108 {
1109 int i;
1110
1111 if (cfun->eh->region_tree == NULL)
1112 return;
1113
1114 for (i = cfun->eh->last_region_number; i > 0; --i)
1115 {
1116 struct eh_region_d *region;
1117 rtx lab;
1118
1119 region = VEC_index (eh_region, cfun->eh->region_array, i);
1120 if (! region || region->region_number != i)
1121 continue;
1122 if (crtl->eh.built_landing_pads)
1123 lab = region->landing_pad;
1124 else
1125 lab = region->label;
1126 }
1127 }
1128
1129 /* Returns true if the current function has exception handling regions. */
1130
1131 bool
1132 current_function_has_exception_handlers (void)
1133 {
1134 int i;
1135
1136 for (i = cfun->eh->last_region_number; i > 0; --i)
1137 {
1138 struct eh_region_d *region;
1139
1140 region = VEC_index (eh_region, cfun->eh->region_array, i);
1141 if (region
1142 && region->region_number == i
1143 && region->type != ERT_THROW)
1144 return true;
1145 }
1146
1147 return false;
1148 }
1149 \f
1150 /* A subroutine of duplicate_eh_regions. Search the region tree under O
1151 for the minimum and maximum region numbers. Update *MIN and *MAX. */
1152
1153 static void
1154 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
1155 {
1156 int i;
1157
1158 if (o->aka)
1159 {
1160 i = bitmap_first_set_bit (o->aka);
1161 if (i < *min)
1162 *min = i;
1163 i = bitmap_last_set_bit (o->aka);
1164 if (i > *max)
1165 *max = i;
1166 }
1167 if (o->region_number < *min)
1168 *min = o->region_number;
1169 if (o->region_number > *max)
1170 *max = o->region_number;
1171
1172 if (o->inner)
1173 {
1174 o = o->inner;
1175 duplicate_eh_regions_0 (o, min, max);
1176 while (o->next_peer)
1177 {
1178 o = o->next_peer;
1179 duplicate_eh_regions_0 (o, min, max);
1180 }
1181 }
1182 }
1183
1184 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
1185 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
1186 about the other internal pointers just yet, just the tree-like pointers. */
1187
1188 static eh_region
1189 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1190 {
1191 eh_region ret, n;
1192
1193 ret = n = GGC_NEW (struct eh_region_d);
1194
1195 *n = *old;
1196 n->outer = outer;
1197 n->next_peer = NULL;
1198 if (old->aka)
1199 {
1200 unsigned i;
1201 bitmap_iterator bi;
1202 n->aka = BITMAP_GGC_ALLOC ();
1203
1204 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1205 {
1206 bitmap_set_bit (n->aka, i + eh_offset);
1207 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1208 }
1209 }
1210
1211 n->region_number += eh_offset;
1212 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1213
1214 if (old->inner)
1215 {
1216 old = old->inner;
1217 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1218 while (old->next_peer)
1219 {
1220 old = old->next_peer;
1221 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1222 }
1223 }
1224
1225 return ret;
1226 }
1227
1228 /* Look for first outer region of R (or R itself) that is
1229 TRY region. Return NULL if none. */
1230
1231 static struct eh_region_d *
1232 find_prev_try (struct eh_region_d * r)
1233 {
1234 for (; r && r->type != ERT_TRY; r = r->outer)
1235 if (r->type == ERT_MUST_NOT_THROW
1236 || (r->type == ERT_ALLOWED_EXCEPTIONS
1237 && !r->u.allowed.type_list))
1238 {
1239 r = NULL;
1240 break;
1241 }
1242 return r;
1243 }
1244
1245 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
1246 function and root the tree below OUTER_REGION. Remap labels using MAP
1247 callback. The special case of COPY_REGION of 0 means all regions. */
1248
1249 int
1250 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1251 void *data, int copy_region, int outer_region)
1252 {
1253 eh_region cur, outer, *splice;
1254 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1255 int num_regions;
1256
1257 if (!ifun->eh)
1258 return 0;
1259 #ifdef ENABLE_CHECKING
1260 verify_eh_tree (ifun);
1261 #endif
1262
1263 /* Find the range of region numbers to be copied. The interface we
1264 provide here mandates a single offset to find new number from old,
1265 which means we must look at the numbers present, instead of the
1266 count or something else. */
1267 if (copy_region > 0)
1268 {
1269 min_region = INT_MAX;
1270 max_region = 0;
1271
1272 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1273 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1274 }
1275 else
1276 {
1277 min_region = 1;
1278 max_region = ifun->eh->last_region_number;
1279 }
1280 num_regions = max_region - min_region + 1;
1281 cfun_last_region_number = cfun->eh->last_region_number;
1282 eh_offset = cfun_last_region_number + 1 - min_region;
1283
1284 /* If we've not yet created a region array, do so now. */
1285 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1286 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1287 cfun->eh->last_region_number + 1);
1288
1289 /* Locate the spot at which to insert the new tree. */
1290 if (outer_region > 0)
1291 {
1292 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1293 if (outer)
1294 splice = &outer->inner;
1295 else
1296 splice = &cfun->eh->region_tree;
1297 }
1298 else
1299 {
1300 outer = NULL;
1301 splice = &cfun->eh->region_tree;
1302 }
1303 while (*splice)
1304 splice = &(*splice)->next_peer;
1305
1306 if (!ifun->eh->region_tree)
1307 {
1308 if (outer)
1309 for (i = cfun_last_region_number + 1;
1310 i <= cfun->eh->last_region_number; i++)
1311 {
1312 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1313 if (outer->aka == NULL)
1314 outer->aka = BITMAP_GGC_ALLOC ();
1315 bitmap_set_bit (outer->aka, i);
1316 }
1317 return eh_offset;
1318 }
1319
1320 /* Copy all the regions in the subtree. */
1321 if (copy_region > 0)
1322 {
1323 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1324 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1325 }
1326 else
1327 {
1328 eh_region n;
1329
1330 cur = ifun->eh->region_tree;
1331 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1332 while (cur->next_peer)
1333 {
1334 cur = cur->next_peer;
1335 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1336 }
1337 }
1338
1339 /* Remap all the labels in the new regions. */
1340 for (i = cfun_last_region_number + 1;
1341 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1342 if (cur && cur->tree_label)
1343 cur->tree_label = map (cur->tree_label, data);
1344
1345 /* Remap all of the internal catch and cleanup linkages. Since we
1346 duplicate entire subtrees, all of the referenced regions will have
1347 been copied too. And since we renumbered them as a block, a simple
1348 bit of arithmetic finds us the index for the replacement region. */
1349 for (i = cfun_last_region_number + 1;
1350 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1351 {
1352 /* All removed EH that is toplevel in input function is now
1353 in outer EH of output function. */
1354 if (cur == NULL)
1355 {
1356 gcc_assert (VEC_index
1357 (eh_region, ifun->eh->region_array,
1358 i - eh_offset) == NULL);
1359 if (outer)
1360 {
1361 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1362 if (outer->aka == NULL)
1363 outer->aka = BITMAP_GGC_ALLOC ();
1364 bitmap_set_bit (outer->aka, i);
1365 }
1366 continue;
1367 }
1368 if (i != cur->region_number)
1369 continue;
1370
1371 #define REMAP(REG) \
1372 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1373 (REG)->region_number + eh_offset)
1374
1375 switch (cur->type)
1376 {
1377 case ERT_TRY:
1378 if (cur->u.eh_try.eh_catch)
1379 REMAP (cur->u.eh_try.eh_catch);
1380 if (cur->u.eh_try.last_catch)
1381 REMAP (cur->u.eh_try.last_catch);
1382 break;
1383
1384 case ERT_CATCH:
1385 if (cur->u.eh_catch.next_catch)
1386 REMAP (cur->u.eh_catch.next_catch);
1387 if (cur->u.eh_catch.prev_catch)
1388 REMAP (cur->u.eh_catch.prev_catch);
1389 break;
1390
1391 default:
1392 break;
1393 }
1394
1395 #undef REMAP
1396 }
1397 #ifdef ENABLE_CHECKING
1398 verify_eh_tree (cfun);
1399 #endif
1400
1401 return eh_offset;
1402 }
1403
1404 /* Return a new copy of EH region OLD inside region NEW_OUTER.
1405 Don't bother updating the rest of the tree. */
1406
1407 static struct eh_region_d *
1408 copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
1409 {
1410 struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);
1411 new_eh->u = old->u;
1412 new_eh->tree_label = old->tree_label;
1413 new_eh->may_contain_throw = old->may_contain_throw;
1414 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1415 cfun->eh->last_region_number + 1);
1416 VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
1417 if (dump_file && (dump_flags & TDF_DETAILS))
1418 fprintf (dump_file, "Copying region %i to %i\n", old->region_number, new_eh->region_number);
1419 return new_eh;
1420 }
1421
1422 /* Return a new copy of EH region OLD inside region NEW_OUTER.
1423
1424 Copy the whole catch/try chain if necessary. */
1425
1426 static struct eh_region_d *
1427 copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
1428 {
1429 struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
1430 VEC(eh_region,heap) *catch_list = NULL;
1431
1432 if (old->type != ERT_CATCH)
1433 {
1434 gcc_assert (old->type != ERT_TRY);
1435 r = copy_eh_region_1 (old, new_outer);
1436 return r;
1437 }
1438
1439 /* Locate and copy corresponding TRY. */
1440 for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
1441 continue;
1442 gcc_assert (old_try->type == ERT_TRY);
1443 new_try = gen_eh_region_try (new_outer);
1444 new_try->tree_label = old_try->tree_label;
1445 new_try->may_contain_throw = old_try->may_contain_throw;
1446 if (dump_file && (dump_flags & TDF_DETAILS))
1447 fprintf (dump_file, "Copying try-catch regions. Try: %i to %i\n",
1448 old_try->region_number, new_try->region_number);
1449 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1450 cfun->eh->last_region_number + 1);
1451 VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);
1452
1453 /* In order to keep CATCH list in order, we need to copy in reverse order. */
1454 for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
1455 VEC_safe_push (eh_region, heap, catch_list, r);
1456
1457 while (VEC_length (eh_region, catch_list))
1458 {
1459 r = VEC_pop (eh_region, catch_list);
1460
1461 /* Duplicate CATCH. */
1462 n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
1463 n->tree_label = r->tree_label;
1464 n->may_contain_throw = r->may_contain_throw;
1465 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
1466 cfun->eh->last_region_number + 1);
1467 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1469
1470 if (dump_file && (dump_flags & TDF_DETAILS))
1471 fprintf (dump_file, "Copying try-catch regions. Catch: %i to %i\n",
1472 r->region_number, n->region_number);
1473 if (r == old)
1474 ret = n;
1475 }
1476 VEC_free (eh_region, heap, catch_list);
1477 gcc_assert (ret);
1478 return ret;
1479 }
1480
1481 /* Callback for foreach_reachable_handler that pushes REGION onto the vector DATA. */
1482
1483 static void
1484 push_reachable_handler (struct eh_region_d *region, void *data)
1485 {
1486 VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
1487 VEC_safe_push (eh_region, heap, *trace, region);
1488 }
1489
1490 /* Redirect EH edge E to NEW_DEST_LABEL.
1491 IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
1492 foreach_reachable_handler. */
1493
1494 struct eh_region_d *
1495 redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
1496 bool inlinable_call, int region_number)
1497 {
1498 struct eh_region_d *outer;
1499 struct eh_region_d *region;
1500 VEC (eh_region, heap) * trace = NULL;
1501 int i;
1502 int start_here = -1;
1503 basic_block old_bb = e->dest;
1504 struct eh_region_d *old, *r = NULL;
1505 bool update_inplace = true;
1506 edge_iterator ei;
1507 edge e2;
1508
1509 /* If there is only one EH edge, we don't need to duplicate;
1510 just update labels in the tree. */
1511 FOR_EACH_EDGE (e2, ei, old_bb->preds)
1512 if ((e2->flags & EDGE_EH) && e2 != e)
1513 {
1514 update_inplace = false;
1515 break;
1516 }
1517
1518 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
1519 gcc_assert (region);
1520
1521 foreach_reachable_handler (region_number, is_resx, inlinable_call,
1522 push_reachable_handler, &trace);
1523 if (dump_file && (dump_flags & TDF_DETAILS))
1524 {
1525 dump_eh_tree (dump_file, cfun);
1526 fprintf (dump_file, "Trace: ");
1527 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1528 fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
1529 fprintf (dump_file, " inplace: %i\n", update_inplace);
1530 }
1531
1532 if (update_inplace)
1533 {
1534 /* In the easy case, just walk the trace and update all occurrences of the label. */
1535 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1536 {
1537 r = VEC_index (eh_region, trace, i);
1538 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1539 {
1540 r->tree_label = new_dest_label;
1541 if (dump_file && (dump_flags & TDF_DETAILS))
1542 fprintf (dump_file, "Updating label for region %i\n",
1543 r->region_number);
1544 }
1545 }
1546 r = region;
1547 }
1548 else
1549 {
1550 /* Now look for the outermost handler that refers to the basic block
1551 in question. We start our duplication there. */
1552 for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
1553 {
1554 r = VEC_index (eh_region, trace, i);
1555 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1556 start_here = i;
1557 }
1558 gcc_assert (start_here >= 0);
1559 outer = VEC_index (eh_region, trace, start_here)->outer;
1560
1561 /* And now do the dirty job! */
1562 for (i = start_here; i >= 0; i--)
1563 {
1564 old = VEC_index (eh_region, trace, i);
1565 gcc_assert (!outer || old->outer != outer->outer);
1566
1567 /* Copy region and update label. */
1568 r = copy_eh_region (old, outer);
1569 VEC_replace (eh_region, trace, i, r);
1570 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1571 {
1572 r->tree_label = new_dest_label;
1573 if (dump_file && (dump_flags & TDF_DETAILS))
1574 fprintf (dump_file, "Updating label for region %i\n",
1575 r->region_number);
1576 }
1577
1578 /* We got into copying a CATCH. copy_eh_region already did the
1579 job of copying all catch blocks corresponding to the try. Now
1580 we need to update the labels in all of them and scan the trace.
1581
1582 We continue nesting into the TRY region corresponding to the
1583 CATCH: when duplicating an EH tree containing subregions of a
1584 CATCH, the CATCH region itself is never inserted into the trace,
1585 so we never get here anyway. */
1586 if (r->type == ERT_CATCH)
1587 {
1588 /* Walk other catch regions we copied and update labels as needed. */
1589 for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
1590 if (r->tree_label && label_to_block (r->tree_label) == old_bb)
1591 {
1592 r->tree_label = new_dest_label;
1593 if (dump_file && (dump_flags & TDF_DETAILS))
1594 fprintf (dump_file, "Updating label for region %i\n",
1595 r->region_number);
1596 }
1597 gcc_assert (r->type == ERT_TRY);
1598
1599 /* Skip sibling catch regions from the trace.
1600 They are already updated. */
1601 while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
1602 {
1603 gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
1604 i--;
1605 }
1606 }
1607
1608 outer = r;
1609 }
1610
1611 if (is_resx || region->type == ERT_THROW)
1612 r = copy_eh_region (region, outer);
1613 }
1614
1615 VEC_free (eh_region, heap, trace);
1616 if (dump_file && (dump_flags & TDF_DETAILS))
1617 {
1618 dump_eh_tree (dump_file, cfun);
1619 fprintf (dump_file, "New region: %i\n", r->region_number);
1620 }
1621 return r;
1622 }
1623
1624 /* Return the region number of the region that is outer to both
1625 REGION_A and REGION_B in IFUN. */
1626
1627 int
1628 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1629 {
1630 struct eh_region_d *rp_a, *rp_b;
1631 sbitmap b_outer;
1632
1633 gcc_assert (ifun->eh->last_region_number > 0);
1634 gcc_assert (ifun->eh->region_tree);
1635
1636 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1637 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1638 gcc_assert (rp_a != NULL);
1639 gcc_assert (rp_b != NULL);
1640
1641 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1642 sbitmap_zero (b_outer);
1643
1644 do
1645 {
1646 SET_BIT (b_outer, rp_b->region_number);
1647 rp_b = rp_b->outer;
1648 }
1649 while (rp_b);
1650
1651 do
1652 {
1653 if (TEST_BIT (b_outer, rp_a->region_number))
1654 {
1655 sbitmap_free (b_outer);
1656 return rp_a->region_number;
1657 }
1658 rp_a = rp_a->outer;
1659 }
1660 while (rp_a);
1661
1662 sbitmap_free (b_outer);
1663 return -1;
1664 }
1665 \f
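/* Hashtable callbacks for type_to_runtime_map. Entries are TREE_LIST
   nodes whose TREE_PURPOSE is the type being mapped; they hash and
   compare by that type. */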
1666 static int
1667 t2r_eq (const void *pentry, const void *pdata)
1668 {
1669 const_tree const entry = (const_tree) pentry;
1670 const_tree const data = (const_tree) pdata;
1671
1672 return TREE_PURPOSE (entry) == data;
1673 }
1674
1675 static hashval_t
1676 t2r_hash (const void *pentry)
1677 {
1678 const_tree const entry = (const_tree) pentry;
1679 return TREE_HASH (TREE_PURPOSE (entry));
1680 }
1681
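/* Record TYPE in the runtime type map, creating the language-specific
   runtime object for it on first use. */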
1682 void
1683 add_type_for_runtime (tree type)
1684 {
1685 tree *slot;
1686
1687 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1688 TREE_HASH (type), INSERT);
1689 if (*slot == NULL)
1690 {
1691 tree runtime = (*lang_eh_runtime_type) (type);
1692 *slot = tree_cons (type, runtime, NULL_TREE);
1693 }
1694 }
1695
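/* Return the runtime object previously registered for TYPE by
   add_type_for_runtime. */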
1696 tree
1697 lookup_type_for_runtime (tree type)
1698 {
1699 tree *slot;
1700
1701 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1702 TREE_HASH (type), NO_INSERT);
1703
1704 /* We should have always inserted the data earlier. */
1705 return TREE_VALUE (*slot);
1706 }
1707
1708 \f
1709 /* Represent an entry in @TTypes for either catch actions
1710 or exception filter actions. */
1711 struct GTY(()) ttypes_filter {
1712 tree t;
1713 int filter;
1714 };
1715
1716 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1717 (a tree) for a @TTypes type node we are thinking about adding. */
1718
1719 static int
1720 ttypes_filter_eq (const void *pentry, const void *pdata)
1721 {
1722 const struct ttypes_filter *const entry
1723 = (const struct ttypes_filter *) pentry;
1724 const_tree const data = (const_tree) pdata;
1725
1726 return entry->t == data;
1727 }
1728
1729 static hashval_t
1730 ttypes_filter_hash (const void *pentry)
1731 {
1732 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1733 return TREE_HASH (entry->t);
1734 }
1735
1736 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1737 exception specification list we are thinking about adding. */
1738 /* ??? Currently we use the type lists in the order given. Someone
1739 should put these in some canonical order. */
1740
1741 static int
1742 ehspec_filter_eq (const void *pentry, const void *pdata)
1743 {
1744 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1745 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1746
1747 return type_list_equal (entry->t, data->t);
1748 }
1749
1750 /* Hash function for exception specification lists. */
1751
1752 static hashval_t
1753 ehspec_filter_hash (const void *pentry)
1754 {
1755 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1756 hashval_t h = 0;
1757 tree list;
1758
1759 for (list = entry->t; list ; list = TREE_CHAIN (list))
1760 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1761 return h;
1762 }
1763
1764 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1765 to speed up the search. Return the filter value to be used. */
1766
1767 static int
1768 add_ttypes_entry (htab_t ttypes_hash, tree type)
1769 {
1770 struct ttypes_filter **slot, *n;
1771
1772 slot = (struct ttypes_filter **)
1773 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1774
1775 if ((n = *slot) == NULL)
1776 {
1777 /* Filter value is a 1 based table index. */
1778
1779 n = XNEW (struct ttypes_filter);
1780 n->t = type;
1781 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1782 *slot = n;
1783
1784 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1785 }
1786
1787 return n->filter;
1788 }
1789
1790 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1791 to speed up the search. Return the filter value to be used. */
1792
1793 static int
1794 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1795 {
1796 struct ttypes_filter **slot, *n;
1797 struct ttypes_filter dummy;
1798
1799 dummy.t = list;
1800 slot = (struct ttypes_filter **)
1801 htab_find_slot (ehspec_hash, &dummy, INSERT);
1802
1803 if ((n = *slot) == NULL)
1804 {
1805 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1806
1807 n = XNEW (struct ttypes_filter);
1808 n->t = list;
1809 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1810 *slot = n;
1811
1812 /* Generate a 0 terminated list of filter values. */
1813 for (; list ; list = TREE_CHAIN (list))
1814 {
1815 if (targetm.arm_eabi_unwinder)
1816 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1817 else
1818 {
1819 /* Look up each type in the list and encode its filter
1820 value as a uleb128. */
1821 push_uleb128 (&crtl->eh.ehspec_data,
1822 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1823 }
1824 }
1825 if (targetm.arm_eabi_unwinder)
1826 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1827 else
1828 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1829 }
1830
1831 return n->filter;
1832 }
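/* Note the sign convention established by the two functions above:
   filters from add_ttypes_entry are positive 1-based table indices,
   while filters from add_ehspec_entry are negative byte offsets into
   the uleb128 buffer. */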
1833
1834 /* Generate the action filter values to be used for CATCH and
1835 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1836 we use lots of landing pads, and so every type or list can share
1837 the same filter value, which saves table space. */
1838
1839 static void
1840 assign_filter_values (void)
1841 {
1842 int i;
1843 htab_t ttypes, ehspec;
1844
1845 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1846 if (targetm.arm_eabi_unwinder)
1847 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1848 else
1849 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1850
1851 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1852 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1853
1854 for (i = cfun->eh->last_region_number; i > 0; --i)
1855 {
1856 struct eh_region_d *r;
1857
1858 r = VEC_index (eh_region, cfun->eh->region_array, i);
1859
1860 /* Mind we don't process a region more than once. */
1861 if (!r || r->region_number != i)
1862 continue;
1863
1864 switch (r->type)
1865 {
1866 case ERT_CATCH:
1867 /* Whatever type_list is (NULL or true list), we build a list
1868 of filters for the region. */
1869 r->u.eh_catch.filter_list = NULL_TREE;
1870
1871 if (r->u.eh_catch.type_list != NULL)
1872 {
1873 /* Get a filter value for each of the types caught and store
1874 them in the region's dedicated list. */
1875 tree tp_node = r->u.eh_catch.type_list;
1876
1877 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1878 {
1879 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1880 tree flt_node = build_int_cst (NULL_TREE, flt);
1881
1882 r->u.eh_catch.filter_list
1883 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1884 }
1885 }
1886 else
1887 {
1888 /* Get a filter value for the NULL list also since it will need
1889 an action record anyway. */
1890 int flt = add_ttypes_entry (ttypes, NULL);
1891 tree flt_node = build_int_cst (NULL_TREE, flt);
1892
1893 r->u.eh_catch.filter_list
1894 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1895 }
1896
1897 break;
1898
1899 case ERT_ALLOWED_EXCEPTIONS:
1900 r->u.allowed.filter
1901 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1902 break;
1903
1904 default:
1905 break;
1906 }
1907 }
1908
1909 htab_delete (ttypes);
1910 htab_delete (ehspec);
1911 }
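
/* For illustration: for a handler written as catch (A, B), with A and B
   assigned filters 1 and 2 by add_ttypes_entry, the loop above conses
   each filter onto the front of filter_list, so the region ends up with
   type_list = (A, B) but filter_list = (2, 1).  The reversal is harmless:
   every entry of the pair of lists targets the same handler label.  */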
1912
1913 /* Emit SEQ into a new basic block just before INSN (which is assumed
1914    to be the first instruction of some existing BB) and return the
1915    newly produced block.  */
1916 static basic_block
1917 emit_to_new_bb_before (rtx seq, rtx insn)
1918 {
1919 rtx last;
1920 basic_block bb;
1921 edge e;
1922 edge_iterator ei;
1923
1924 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1925 call), we don't want it to go into newly created landing pad or other EH
1926 construct. */
1927 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1928 if (e->flags & EDGE_FALLTHRU)
1929 force_nonfallthru (e);
1930 else
1931 ei_next (&ei);
1932 last = emit_insn_before (seq, insn);
1933 if (BARRIER_P (last))
1934 last = PREV_INSN (last);
1935 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1936 update_bb_for_insn (bb);
1937 bb->flags |= BB_SUPERBLOCK;
1938 return bb;
1939 }
1940
1941 /* Generate the code to actually handle exceptions, which will follow the
1942 landing pads. */
1943
1944 static void
1945 build_post_landing_pads (void)
1946 {
1947 int i;
1948
1949 for (i = cfun->eh->last_region_number; i > 0; --i)
1950 {
1951 struct eh_region_d *region;
1952 rtx seq;
1953
1954 region = VEC_index (eh_region, cfun->eh->region_array, i);
1955 /* Mind we don't process a region more than once. */
1956 if (!region || region->region_number != i)
1957 continue;
1958
1959 switch (region->type)
1960 {
1961 case ERT_TRY:
1962 	  /* It is possible that a TRY region is kept alive only because some of
1963 	     its contained catch regions still have a RESX instruction, but these
1964 	     are reached via their copies.  In this case we need do nothing.  */
1965 if (!region->u.eh_try.eh_catch->label)
1966 break;
1967
1968 /* ??? Collect the set of all non-overlapping catch handlers
1969 all the way up the chain until blocked by a cleanup. */
1970 /* ??? Outer try regions can share landing pads with inner
1971 try regions if the types are completely non-overlapping,
1972 and there are no intervening cleanups. */
1973
1974 region->post_landing_pad = gen_label_rtx ();
1975
1976 start_sequence ();
1977
1978 emit_label (region->post_landing_pad);
1979
1980 /* ??? It is mighty inconvenient to call back into the
1981 switch statement generation code in expand_end_case.
1982 Rapid prototyping sez a sequence of ifs. */
1983 {
1984 struct eh_region_d *c;
1985 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1986 {
1987 if (c->u.eh_catch.type_list == NULL)
1988 emit_jump (c->label);
1989 else
1990 {
1991 	      /* We need one cmp/jump per type caught.  Each type
1992 	         list entry has a matching entry in the filter list
1993 	         (see assign_filter_values).  */
1994 tree tp_node = c->u.eh_catch.type_list;
1995 tree flt_node = c->u.eh_catch.filter_list;
1996
1997 for (; tp_node; )
1998 {
1999 emit_cmp_and_jump_insns
2000 (crtl->eh.filter,
2001 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
2002 EQ, NULL_RTX,
2003 targetm.eh_return_filter_mode (), 0, c->label);
2004
2005 tp_node = TREE_CHAIN (tp_node);
2006 flt_node = TREE_CHAIN (flt_node);
2007 }
2008 }
2009 }
2010 }
2011
2012 /* We delay the generation of the _Unwind_Resume until we generate
2013 landing pads. We emit a marker here so as to get good control
2014 flow data in the meantime. */
2015 region->resume
2016 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2017 emit_barrier ();
2018
2019 seq = get_insns ();
2020 end_sequence ();
2021
2022 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
2023
2024 break;
2025
2026 case ERT_ALLOWED_EXCEPTIONS:
2027 if (!region->label)
2028 break;
2029 region->post_landing_pad = gen_label_rtx ();
2030
2031 start_sequence ();
2032
2033 emit_label (region->post_landing_pad);
2034
2035 emit_cmp_and_jump_insns (crtl->eh.filter,
2036 GEN_INT (region->u.allowed.filter),
2037 EQ, NULL_RTX,
2038 targetm.eh_return_filter_mode (), 0, region->label);
2039
2040 /* We delay the generation of the _Unwind_Resume until we generate
2041 landing pads. We emit a marker here so as to get good control
2042 flow data in the meantime. */
2043 region->resume
2044 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
2045 emit_barrier ();
2046
2047 seq = get_insns ();
2048 end_sequence ();
2049
2050 emit_to_new_bb_before (seq, region->label);
2051 break;
2052
2053 case ERT_CLEANUP:
2054 case ERT_MUST_NOT_THROW:
2055 region->post_landing_pad = region->label;
2056 break;
2057
2058 case ERT_CATCH:
2059 case ERT_THROW:
2060 /* Nothing to do. */
2061 break;
2062
2063 default:
2064 gcc_unreachable ();
2065 }
2066 }
2067 }
2068
2069 /* Replace RESX patterns with jumps to the next handler if any, or calls to
2070 _Unwind_Resume otherwise. */
2071
2072 static void
2073 connect_post_landing_pads (void)
2074 {
2075 int i;
2076
2077 for (i = cfun->eh->last_region_number; i > 0; --i)
2078 {
2079 struct eh_region_d *region;
2080 struct eh_region_d *outer;
2081 rtx seq;
2082 rtx barrier;
2083
2084 region = VEC_index (eh_region, cfun->eh->region_array, i);
2085 /* Mind we don't process a region more than once. */
2086 if (!region || region->region_number != i)
2087 continue;
2088
2089 /* If there is no RESX, or it has been deleted by flow, there's
2090 nothing to fix up. */
2091 if (! region->resume || INSN_DELETED_P (region->resume))
2092 continue;
2093
2094 /* Search for another landing pad in this function. */
2095 for (outer = region->outer; outer ; outer = outer->outer)
2096 if (outer->post_landing_pad)
2097 break;
2098
2099 start_sequence ();
2100
2101 if (outer)
2102 {
2103 edge e;
2104 basic_block src, dest;
2105
2106 emit_jump (outer->post_landing_pad);
2107 src = BLOCK_FOR_INSN (region->resume);
2108 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
2109 while (EDGE_COUNT (src->succs) > 0)
2110 remove_edge (EDGE_SUCC (src, 0));
2111 e = make_edge (src, dest, 0);
2112 e->probability = REG_BR_PROB_BASE;
2113 e->count = src->count;
2114 }
2115 else
2116 {
2117 emit_library_call (unwind_resume_libfunc, LCT_THROW,
2118 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
2119
2120 /* What we just emitted was a throwing libcall, so it got a
2121 barrier automatically added after it. If the last insn in
2122 the libcall sequence isn't the barrier, it's because the
2123 target emits multiple insns for a call, and there are insns
2124 after the actual call insn (which are redundant and would be
2125 optimized away). The barrier is inserted exactly after the
2126 call insn, so let's go get that and delete the insns after
2127 it, because below we need the barrier to be the last insn in
2128 the sequence. */
2129 delete_insns_since (NEXT_INSN (last_call_insn ()));
2130 }
2131
2132 seq = get_insns ();
2133 end_sequence ();
2134 barrier = emit_insn_before (seq, region->resume);
2135 /* Avoid duplicate barrier. */
2136 gcc_assert (BARRIER_P (barrier));
2137 delete_insn (barrier);
2138 delete_insn (region->resume);
2139
2140 /* ??? From tree-ssa we can wind up with catch regions whose
2141 label is not instantiated, but whose resx is present. Now
2142 that we've dealt with the resx, kill the region. */
2143 if (region->label == NULL && region->type == ERT_CLEANUP)
2144 remove_eh_handler (region);
2145 }
2146 }
2147
2148 \f
2149 static void
2150 dw2_build_landing_pads (void)
2151 {
2152 int i;
2153
2154 for (i = cfun->eh->last_region_number; i > 0; --i)
2155 {
2156 struct eh_region_d *region;
2157 rtx seq;
2158 basic_block bb;
2159 edge e;
2160
2161 region = VEC_index (eh_region, cfun->eh->region_array, i);
2162 /* Mind we don't process a region more than once. */
2163 if (!region || region->region_number != i)
2164 continue;
2165
2166 if (region->type != ERT_CLEANUP
2167 && region->type != ERT_TRY
2168 && region->type != ERT_ALLOWED_EXCEPTIONS)
2169 continue;
2170
2171 if (!region->post_landing_pad)
2172 continue;
2173
2174 start_sequence ();
2175
2176 region->landing_pad = gen_label_rtx ();
2177 emit_label (region->landing_pad);
2178
2179 #ifdef HAVE_exception_receiver
2180 if (HAVE_exception_receiver)
2181 emit_insn (gen_exception_receiver ());
2182 else
2183 #endif
2184 #ifdef HAVE_nonlocal_goto_receiver
2185 if (HAVE_nonlocal_goto_receiver)
2186 emit_insn (gen_nonlocal_goto_receiver ());
2187 else
2188 #endif
2189 { /* Nothing */ }
2190
2191 emit_move_insn (crtl->eh.exc_ptr,
2192 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
2193 emit_move_insn (crtl->eh.filter,
2194 gen_rtx_REG (targetm.eh_return_filter_mode (),
2195 EH_RETURN_DATA_REGNO (1)));
2196
2197 seq = get_insns ();
2198 end_sequence ();
2199
2200 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
2201 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2202 e->count = bb->count;
2203 e->probability = REG_BR_PROB_BASE;
2204 }
2205 }
2206
2207 \f
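/* Per-region data used while building the sjlj dispatch code, indexed by
   region number: DIRECTLY_REACHABLE records whether some insn may
   transfer control to the region; ACTION_INDEX is the action chain
   computed by collect_one_action_chain; DISPATCH_INDEX is the value
   compared against in the common dispatch block; CALL_SITE_INDEX is the
   value stored into the function context before each throwing insn.  */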
2208 struct sjlj_lp_info
2209 {
2210 int directly_reachable;
2211 int action_index;
2212 int dispatch_index;
2213 int call_site_index;
2214 };
2215
2216 static bool
2217 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
2218 {
2219 rtx insn;
2220 bool found_one = false;
2221
2222 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2223 {
2224 struct eh_region_d *region;
2225 enum reachable_code rc;
2226 tree type_thrown;
2227 rtx note;
2228
2229 if (! INSN_P (insn))
2230 continue;
2231
2232 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2233 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2234 continue;
2235
2236 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2237 if (!region)
2238 continue;
2239
2240 type_thrown = NULL_TREE;
2241 if (region->type == ERT_THROW)
2242 {
2243 type_thrown = region->u.eh_throw.type;
2244 region = region->outer;
2245 }
2246
2247 /* Find the first containing region that might handle the exception.
2248 That's the landing pad to which we will transfer control. */
2249 rc = RNL_NOT_CAUGHT;
2250 for (; region; region = region->outer)
2251 {
2252 rc = reachable_next_level (region, type_thrown, NULL, false);
2253 if (rc != RNL_NOT_CAUGHT)
2254 break;
2255 }
2256 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2257 {
2258 lp_info[region->region_number].directly_reachable = 1;
2259 found_one = true;
2260 }
2261 }
2262
2263 return found_one;
2264 }
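
/* For illustration: REG_EH_REGION notes with a non-positive value mark
   insns known not to throw, so only notes with positive region numbers
   are considered above (see also sjlj_mark_call_sites).  */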
2265
2266 static void
2267 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2268 {
2269 htab_t ar_hash;
2270 int i, index;
2271
2272 /* First task: build the action table. */
2273
2274 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
2275 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2276
2277 for (i = cfun->eh->last_region_number; i > 0; --i)
2278 if (lp_info[i].directly_reachable)
2279 {
2280 struct eh_region_d *r =
2281 VEC_index (eh_region, cfun->eh->region_array, i);
2282
2283 r->landing_pad = dispatch_label;
2284 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2285 if (lp_info[i].action_index != -1)
2286 crtl->uses_eh_lsda = 1;
2287 }
2288
2289 htab_delete (ar_hash);
2290
2291 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2292 landing pad label for the region. For sjlj though, there is one
2293 common landing pad from which we dispatch to the post-landing pads.
2294
2295 A region receives a dispatch index if it is directly reachable
2296 and requires in-function processing. Regions that share post-landing
2297 pads may share dispatch indices. */
2298 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2299 (see build_post_landing_pads) so we don't bother checking for it. */
2300
2301 index = 0;
2302 for (i = cfun->eh->last_region_number; i > 0; --i)
2303 if (lp_info[i].directly_reachable)
2304 lp_info[i].dispatch_index = index++;
2305
2306 	  /* Finally: assign call-site values.  In dwarf2 terms, this would be
2307 the region number assigned by convert_to_eh_region_ranges, but
2308 handles no-action and must-not-throw differently. */
2309
2310 call_site_base = 1;
2311 for (i = cfun->eh->last_region_number; i > 0; --i)
2312 if (lp_info[i].directly_reachable)
2313 {
2314 int action = lp_info[i].action_index;
2315
2316 /* Map must-not-throw to otherwise unused call-site index 0. */
2317 if (action == -2)
2318 index = 0;
2319 /* Map no-action to otherwise unused call-site index -1. */
2320 else if (action == -1)
2321 index = -1;
2322 /* Otherwise, look it up in the table. */
2323 else
2324 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2325
2326 lp_info[i].call_site_index = index;
2327 }
2328 }
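
/* For illustration: with call_site_base set to 1 above, the first real
   call-site record receives index 1, leaving index 0 to mark
   must-not-throw regions and -1 to mark insns that require no action at
   all.  */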
2329
2330 static void
2331 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2332 {
2333 int last_call_site = -2;
2334 rtx insn, mem;
2335
2336 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2337 {
2338 struct eh_region_d *region;
2339 int this_call_site;
2340 rtx note, before, p;
2341
2342 /* Reset value tracking at extended basic block boundaries. */
2343 if (LABEL_P (insn))
2344 last_call_site = -2;
2345
2346 if (! INSN_P (insn))
2347 continue;
2348
2349 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2350
2351 /* Calls that are known to not throw need not be marked. */
2352 if (note && INTVAL (XEXP (note, 0)) <= 0)
2353 continue;
2354
2355 if (note)
2356 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2357 else
2358 region = NULL;
2359
2360 if (!region)
2361 {
2362 /* Calls (and trapping insns) without notes are outside any
2363 exception handling region in this function. Mark them as
2364 no action. */
2365 if (CALL_P (insn)
2366 || (flag_non_call_exceptions
2367 && may_trap_p (PATTERN (insn))))
2368 this_call_site = -1;
2369 else
2370 continue;
2371 }
2372 else
2373 this_call_site = lp_info[region->region_number].call_site_index;
2374
2375 if (this_call_site == last_call_site)
2376 continue;
2377
2378 	      /* Don't separate a call from its argument loads.  */
2379 before = insn;
2380 if (CALL_P (insn))
2381 before = find_first_parameter_load (insn, NULL_RTX);
2382
2383 start_sequence ();
2384 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2385 sjlj_fc_call_site_ofs);
2386 emit_move_insn (mem, GEN_INT (this_call_site));
2387 p = get_insns ();
2388 end_sequence ();
2389
2390 emit_insn_before (p, before);
2391 last_call_site = this_call_site;
2392 }
2393 }
2394
2395 /* Construct the SjLj_Function_Context. */
2396
2397 static void
2398 sjlj_emit_function_enter (rtx dispatch_label)
2399 {
2400 rtx fn_begin, fc, mem, seq;
2401 bool fn_begin_outside_block;
2402
2403 fc = crtl->eh.sjlj_fc;
2404
2405 start_sequence ();
2406
2407 /* We're storing this libcall's address into memory instead of
2408 calling it directly. Thus, we must call assemble_external_libcall
2409 	     here, as we cannot depend on emit_library_call to do it for us.  */
2410 assemble_external_libcall (eh_personality_libfunc);
2411 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2412 emit_move_insn (mem, eh_personality_libfunc);
2413
2414 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2415 if (crtl->uses_eh_lsda)
2416 {
2417 char buf[20];
2418 rtx sym;
2419
2420 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2421 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2422 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2423 emit_move_insn (mem, sym);
2424 }
2425 else
2426 emit_move_insn (mem, const0_rtx);
2427
2428 #ifdef DONT_USE_BUILTIN_SETJMP
2429 {
2430 rtx x;
2431 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2432 TYPE_MODE (integer_type_node), 1,
2433 plus_constant (XEXP (fc, 0),
2434 sjlj_fc_jbuf_ofs), Pmode);
2435
2436 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2437 TYPE_MODE (integer_type_node), 0, dispatch_label);
2438 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2439 }
2440 #else
2441 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2442 dispatch_label);
2443 #endif
2444
2445 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2446 1, XEXP (fc, 0), Pmode);
2447
2448 seq = get_insns ();
2449 end_sequence ();
2450
2451 /* ??? Instead of doing this at the beginning of the function,
2452 do this in a block that is at loop level 0 and dominates all
2453 can_throw_internal instructions. */
2454
2455 fn_begin_outside_block = true;
2456 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2457 if (NOTE_P (fn_begin))
2458 {
2459 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2460 break;
2461 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2462 fn_begin_outside_block = false;
2463 }
2464
2465 if (fn_begin_outside_block)
2466 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2467 else
2468 emit_insn_after (seq, fn_begin);
2469 }
2470
2471 /* Call back from expand_function_end to know where we should put
2472 the call to unwind_sjlj_unregister_libfunc if needed. */
2473
2474 void
2475 sjlj_emit_function_exit_after (rtx after)
2476 {
2477 crtl->eh.sjlj_exit_after = after;
2478 }
2479
2480 static void
2481 sjlj_emit_function_exit (void)
2482 {
2483 rtx seq, insn;
2484
2485 start_sequence ();
2486
2487 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2488 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2489
2490 seq = get_insns ();
2491 end_sequence ();
2492
2493 /* ??? Really this can be done in any block at loop level 0 that
2494 post-dominates all can_throw_internal instructions. This is
2495 the last possible moment. */
2496
2497 insn = crtl->eh.sjlj_exit_after;
2498 if (LABEL_P (insn))
2499 insn = NEXT_INSN (insn);
2500
2501 emit_insn_after (seq, insn);
2502 }
2503
2504 static void
2505 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2506 {
2507 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2508 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2509 int i, first_reachable;
2510 rtx mem, dispatch, seq, fc;
2511 rtx before;
2512 basic_block bb;
2513 edge e;
2514
2515 fc = crtl->eh.sjlj_fc;
2516
2517 start_sequence ();
2518
2519 emit_label (dispatch_label);
2520
2521 #ifndef DONT_USE_BUILTIN_SETJMP
2522 expand_builtin_setjmp_receiver (dispatch_label);
2523 #endif
2524
2525 /* Load up dispatch index, exc_ptr and filter values from the
2526 function context. */
2527 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2528 sjlj_fc_call_site_ofs);
2529 dispatch = copy_to_reg (mem);
2530
2531 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2532 if (unwind_word_mode != ptr_mode)
2533 {
2534 #ifdef POINTERS_EXTEND_UNSIGNED
2535 mem = convert_memory_address (ptr_mode, mem);
2536 #else
2537 mem = convert_to_mode (ptr_mode, mem, 0);
2538 #endif
2539 }
2540 emit_move_insn (crtl->eh.exc_ptr, mem);
2541
2542 mem = adjust_address (fc, unwind_word_mode,
2543 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2544 if (unwind_word_mode != filter_mode)
2545 mem = convert_to_mode (filter_mode, mem, 0);
2546 emit_move_insn (crtl->eh.filter, mem);
2547
2548 /* Jump to one of the directly reachable regions. */
2549 /* ??? This really ought to be using a switch statement. */
2550
2551 first_reachable = 0;
2552 for (i = cfun->eh->last_region_number; i > 0; --i)
2553 {
2554 if (! lp_info[i].directly_reachable)
2555 continue;
2556
2557 if (! first_reachable)
2558 {
2559 first_reachable = i;
2560 continue;
2561 }
2562
2563 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2564 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2565 (((struct eh_region_d *)
2566 VEC_index (eh_region,
2567 cfun->eh->region_array, i))
2568 ->post_landing_pad));
2569 }
2570
2571 seq = get_insns ();
2572 end_sequence ();
2573
2574 before = (((struct eh_region_d *)
2575 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2576 ->post_landing_pad);
2577
2578 bb = emit_to_new_bb_before (seq, before);
2579 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2580 e->count = bb->count;
2581 e->probability = REG_BR_PROB_BASE;
2582 }
2583
2584 static void
2585 sjlj_build_landing_pads (void)
2586 {
2587 struct sjlj_lp_info *lp_info;
2588
2589 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2590
2591 if (sjlj_find_directly_reachable_regions (lp_info))
2592 {
2593 rtx dispatch_label = gen_label_rtx ();
2594 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2595 TYPE_MODE (sjlj_fc_type_node),
2596 TYPE_ALIGN (sjlj_fc_type_node));
2597 crtl->eh.sjlj_fc
2598 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2599 int_size_in_bytes (sjlj_fc_type_node),
2600 align);
2601
2602 sjlj_assign_call_site_values (dispatch_label, lp_info);
2603 sjlj_mark_call_sites (lp_info);
2604
2605 sjlj_emit_function_enter (dispatch_label);
2606 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2607 sjlj_emit_function_exit ();
2608 }
2609
2610 free (lp_info);
2611 }
2612
2613 /* After initial rtl generation, call back to finish generating
2614 exception support code. */
2615
2616 static void
2617 finish_eh_generation (void)
2618 {
2619 basic_block bb;
2620
2621 /* Nothing to do if no regions created. */
2622 if (cfun->eh->region_tree == NULL)
2623 return;
2624
2625 /* The object here is to provide detailed information (via
2626 reachable_handlers) on how exception control flows within the
2627 function for the CFG construction. In this first pass, we can
2628 include type information garnered from ERT_THROW and
2629 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2630 in deleting unreachable handlers. Subsequently, we will generate
2631 landing pads which will connect many of the handlers, and then
2632 type information will not be effective. Still, this is a win
2633 over previous implementations. */
2634
2635 /* These registers are used by the landing pads. Make sure they
2636 have been generated. */
2637 get_exception_pointer ();
2638 get_exception_filter ();
2639
2640 /* Construct the landing pads. */
2641
2642 assign_filter_values ();
2643 build_post_landing_pads ();
2644 connect_post_landing_pads ();
2645 if (USING_SJLJ_EXCEPTIONS)
2646 sjlj_build_landing_pads ();
2647 else
2648 dw2_build_landing_pads ();
2649
2650 crtl->eh.built_landing_pads = 1;
2651
2652 /* We've totally changed the CFG. Start over. */
2653 find_exception_handler_labels ();
2654 break_superblocks ();
2655 if (USING_SJLJ_EXCEPTIONS
2656 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2657 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2658 commit_edge_insertions ();
2659 FOR_EACH_BB (bb)
2660 {
2661 edge e;
2662 edge_iterator ei;
2663 bool eh = false;
2664 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2665 {
2666 if (e->flags & EDGE_EH)
2667 {
2668 remove_edge (e);
2669 eh = true;
2670 }
2671 else
2672 ei_next (&ei);
2673 }
2674 if (eh)
2675 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2676 }
2677 }
2678 \f
2679 /* This section handles removing dead code for flow. */
2680
2681 /* Splice REGION from the region tree and replace it by REPLACE.
2682    When UPDATE_CATCH_TRY is true, also update the links from catch
2683    regions to their containing try region.  */
2684
2685 static void
2686 remove_eh_handler_and_replace (struct eh_region_d *region,
2687 struct eh_region_d *replace,
2688 bool update_catch_try)
2689 {
2690 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2691 rtx lab;
2692
2693 outer = region->outer;
2694
2695 /* For the benefit of efficiently handling REG_EH_REGION notes,
2696 replace this region in the region array with its containing
2697 region. Note that previous region deletions may result in
2698 multiple copies of this region in the array, so we have a
2699 list of alternate numbers by which we are known. */
2700
2701 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2702 replace);
2703 if (region->aka)
2704 {
2705 unsigned i;
2706 bitmap_iterator bi;
2707
2708 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2709 {
2710 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2711 }
2712 }
2713
2714 if (replace)
2715 {
2716 if (!replace->aka)
2717 replace->aka = BITMAP_GGC_ALLOC ();
2718 if (region->aka)
2719 bitmap_ior_into (replace->aka, region->aka);
2720 bitmap_set_bit (replace->aka, region->region_number);
2721 }
2722
2723 if (crtl->eh.built_landing_pads)
2724 lab = region->landing_pad;
2725 else
2726 lab = region->label;
2727 if (outer)
2728 pp_start = &outer->inner;
2729 else
2730 pp_start = &cfun->eh->region_tree;
2731 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2732 continue;
2733 *pp = region->next_peer;
2734
2735 if (replace)
2736 pp_start = &replace->inner;
2737 else
2738 pp_start = &cfun->eh->region_tree;
2739 inner = region->inner;
2740 if (inner)
2741 {
2742 for (p = inner; p->next_peer ; p = p->next_peer)
2743 p->outer = replace;
2744 p->outer = replace;
2745
2746 p->next_peer = *pp_start;
2747 *pp_start = inner;
2748 }
2749
2750 if (region->type == ERT_CATCH
2751 && update_catch_try)
2752 {
2753 struct eh_region_d *eh_try, *next, *prev;
2754
2755 for (eh_try = region->next_peer;
2756 eh_try->type == ERT_CATCH;
2757 eh_try = eh_try->next_peer)
2758 continue;
2759 gcc_assert (eh_try->type == ERT_TRY);
2760
2761 next = region->u.eh_catch.next_catch;
2762 prev = region->u.eh_catch.prev_catch;
2763
2764 if (next)
2765 next->u.eh_catch.prev_catch = prev;
2766 else
2767 eh_try->u.eh_try.last_catch = prev;
2768 if (prev)
2769 prev->u.eh_catch.next_catch = next;
2770 else
2771 {
2772 eh_try->u.eh_try.eh_catch = next;
2773 if (! next)
2774 remove_eh_handler (eh_try);
2775 }
2776 }
2777 }
2778
2779 /* Splice REGION from the region tree and replace it by the outer region
2780 etc. */
2781
2782 static void
2783 remove_eh_handler (struct eh_region_d *region)
2784 {
2785 remove_eh_handler_and_replace (region, region->outer, true);
2786 }
2787
2788 /* Remove EH region R that has turned out to have no code in its handler.  */
2789
2790 void
2791 remove_eh_region (int r)
2792 {
2793 struct eh_region_d *region;
2794
2795 region = VEC_index (eh_region, cfun->eh->region_array, r);
2796 remove_eh_handler (region);
2797 }
2798
2799 /* Remove EH region R that has turned out to have no code in its
2800    handler, and replace it by the outer region of R2.  */
2801
2802 void
2803 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2804 {
2805 struct eh_region_d *region, *region2;
2806
2807 region = VEC_index (eh_region, cfun->eh->region_array, r);
2808 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2809 remove_eh_handler_and_replace (region, region2->outer, true);
2810 }
2811
2812 /* Invoke CALLBACK for every exception handler label.  Only used by old
2813    loop hackery; should not be used by new code.  */
2814
2815 void
2816 for_each_eh_label (void (*callback) (rtx))
2817 {
2818 int i;
2819 for (i = 0; i < cfun->eh->last_region_number; i++)
2820 {
2821 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2822 if (r && r->region_number == i && r->label
2823 && LABEL_P (r->label))
2824 (*callback) (r->label);
2825 }
2826 }
2827
2828 /* Invoke CALLBACK for every exception region in the current function. */
2829
2830 void
2831 for_each_eh_region (void (*callback) (struct eh_region_d *))
2832 {
2833 int i, n = cfun->eh->last_region_number;
2834 for (i = 1; i <= n; ++i)
2835 {
2836 struct eh_region_d *region;
2837
2838 region = VEC_index (eh_region, cfun->eh->region_array, i);
2839 if (region)
2840 (*callback) (region);
2841 }
2842 }
2843 \f
2844 /* This section describes CFG exception edges for flow. */
2845
2846 /* For communicating between calls to reachable_next_level. */
2847 struct reachable_info
2848 {
2849 tree types_caught;
2850 tree types_allowed;
2851 void (*callback) (struct eh_region_d *, void *);
2852 void *callback_data;
2853 };
2854
2855 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2856 base class of TYPE, is in HANDLED. */
2857
2858 static int
2859 check_handled (tree handled, tree type)
2860 {
2861 tree t;
2862
2863 /* We can check for exact matches without front-end help. */
2864 if (! lang_eh_type_covers)
2865 {
2866 for (t = handled; t ; t = TREE_CHAIN (t))
2867 if (TREE_VALUE (t) == type)
2868 return 1;
2869 }
2870 else
2871 {
2872 for (t = handled; t ; t = TREE_CHAIN (t))
2873 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2874 return 1;
2875 }
2876
2877 return 0;
2878 }
2879
2880 /* A subroutine of reachable_next_level. If we are collecting a list
2881 of handlers, add one. After landing pad generation, reference
2882 it instead of the handlers themselves. Further, the handlers are
2883 all wired together, so by referencing one, we've got them all.
2884 Before landing pad generation we reference each handler individually.
2885
2886 LP_REGION contains the landing pad; REGION is the handler. */
2887
2888 static void
2889 add_reachable_handler (struct reachable_info *info,
2890 struct eh_region_d *lp_region,
2891 struct eh_region_d *region)
2892 {
2893 if (! info)
2894 return;
2895
2896 if (crtl->eh.built_landing_pads)
2897 info->callback (lp_region, info->callback_data);
2898 else
2899 info->callback (region, info->callback_data);
2900 }
2901
2902 /* Process one level of exception regions for reachability.
2903 If TYPE_THROWN is non-null, then it is the *exact* type being
2904 propagated. If INFO is non-null, then collect handler labels
2905 and caught/allowed type information between invocations. */
2906
2907 static enum reachable_code
2908 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2909 struct reachable_info *info,
2910 bool maybe_resx)
2911 {
2912 switch (region->type)
2913 {
2914 case ERT_CLEANUP:
2915 /* Before landing-pad generation, we model control flow
2916 directly to the individual handlers. In this way we can
2917 see that catch handler types may shadow one another. */
2918 add_reachable_handler (info, region, region);
2919 return RNL_MAYBE_CAUGHT;
2920
2921 case ERT_TRY:
2922 {
2923 struct eh_region_d *c;
2924 enum reachable_code ret = RNL_NOT_CAUGHT;
2925
2926 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2927 {
2928 /* A catch-all handler ends the search. */
2929 if (c->u.eh_catch.type_list == NULL)
2930 {
2931 add_reachable_handler (info, region, c);
2932 return RNL_CAUGHT;
2933 }
2934
2935 if (type_thrown)
2936 {
2937 /* If we have at least one type match, end the search. */
2938 tree tp_node = c->u.eh_catch.type_list;
2939
2940 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2941 {
2942 tree type = TREE_VALUE (tp_node);
2943
2944 if (type == type_thrown
2945 || (lang_eh_type_covers
2946 && (*lang_eh_type_covers) (type, type_thrown)))
2947 {
2948 add_reachable_handler (info, region, c);
2949 return RNL_CAUGHT;
2950 }
2951 }
2952
2953 /* If we have definitive information of a match failure,
2954 the catch won't trigger. */
2955 if (lang_eh_type_covers)
2956 return RNL_NOT_CAUGHT;
2957 }
2958
2959 /* At this point, we either don't know what type is thrown or
2960 don't have front-end assistance to help deciding if it is
2961 covered by one of the types in the list for this region.
2962
2963 We'd then like to add this region to the list of reachable
2964 handlers since it is indeed potentially reachable based on the
2965 information we have.
2966
2967 Actually, this handler is for sure not reachable if all the
2968 types it matches have already been caught. That is, it is only
2969 potentially reachable if at least one of the types it catches
2970 has not been previously caught. */
2971
2972 if (! info)
2973 ret = RNL_MAYBE_CAUGHT;
2974 else
2975 {
2976 tree tp_node = c->u.eh_catch.type_list;
2977 bool maybe_reachable = false;
2978
2979 /* Compute the potential reachability of this handler and
2980 update the list of types caught at the same time. */
2981 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2982 {
2983 tree type = TREE_VALUE (tp_node);
2984
2985 if (! check_handled (info->types_caught, type))
2986 {
2987 info->types_caught
2988 = tree_cons (NULL, type, info->types_caught);
2989
2990 maybe_reachable = true;
2991 }
2992 }
2993
2994 if (maybe_reachable)
2995 {
2996 add_reachable_handler (info, region, c);
2997
2998 /* ??? If the catch type is a base class of every allowed
2999 type, then we know we can stop the search. */
3000 ret = RNL_MAYBE_CAUGHT;
3001 }
3002 }
3003 }
3004
3005 return ret;
3006 }
3007
3008 case ERT_ALLOWED_EXCEPTIONS:
3009 /* An empty list of types definitely ends the search. */
3010 if (region->u.allowed.type_list == NULL_TREE)
3011 {
3012 add_reachable_handler (info, region, region);
3013 return RNL_CAUGHT;
3014 }
3015
3016 /* Collect a list of lists of allowed types for use in detecting
3017 when a catch may be transformed into a catch-all. */
3018 if (info)
3019 info->types_allowed = tree_cons (NULL_TREE,
3020 region->u.allowed.type_list,
3021 info->types_allowed);
3022
3023 /* If we have definitive information about the type hierarchy,
3024 then we can tell if the thrown type will pass through the
3025 filter. */
3026 if (type_thrown && lang_eh_type_covers)
3027 {
3028 if (check_handled (region->u.allowed.type_list, type_thrown))
3029 return RNL_NOT_CAUGHT;
3030 else
3031 {
3032 add_reachable_handler (info, region, region);
3033 return RNL_CAUGHT;
3034 }
3035 }
3036
3037 add_reachable_handler (info, region, region);
3038 return RNL_MAYBE_CAUGHT;
3039
3040 case ERT_CATCH:
3041 /* Catch regions are handled by their controlling try region. */
3042 return RNL_NOT_CAUGHT;
3043
3044 case ERT_MUST_NOT_THROW:
3045 /* Here we end our search, since no exceptions may propagate.
3046
3047 	     Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3048 	     only via locally handled RESX instructions.
3049 	
3050 	     When we inline a function call, we can bring in new handlers.  To
3051 	     avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
3052 	     assume that such handlers exist for any inlinable call until
3053 	     inlining decisions are fixed.  */
3054
3055 if (maybe_resx)
3056 {
3057 add_reachable_handler (info, region, region);
3058 return RNL_CAUGHT;
3059 }
3060 else
3061 return RNL_BLOCKED;
3062
3063 case ERT_THROW:
3064 case ERT_UNKNOWN:
3065 /* Shouldn't see these here. */
3066 gcc_unreachable ();
3067 break;
3068 default:
3069 gcc_unreachable ();
3070 }
3071 }
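
/* For illustration, assuming front-end type information is available and
   the exact thrown type A is known: a try region whose only handler is
   catch (B), where B does not cover A, yields RNL_NOT_CAUGHT; catch (A)
   yields RNL_CAUGHT; a cleanup region always yields RNL_MAYBE_CAUGHT;
   and a must-not-throw region yields RNL_BLOCKED unless MAYBE_RESX.  */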
3072
3073 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3074
3075 void
3076 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3077 void (*callback) (struct eh_region_d *, void *),
3078 void *callback_data)
3079 {
3080 struct reachable_info info;
3081 struct eh_region_d *region;
3082 tree type_thrown;
3083
3084 memset (&info, 0, sizeof (info));
3085 info.callback = callback;
3086 info.callback_data = callback_data;
3087
3088 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3089 if (!region)
3090 return;
3091
3092 type_thrown = NULL_TREE;
3093 if (is_resx)
3094 {
3095 /* A RESX leaves a region instead of entering it. Thus the
3096 region itself may have been deleted out from under us. */
3097 if (region == NULL)
3098 return;
3099 region = region->outer;
3100 }
3101 else if (region->type == ERT_THROW)
3102 {
3103 type_thrown = region->u.eh_throw.type;
3104 region = region->outer;
3105 }
3106
3107 while (region)
3108 {
3109 if (reachable_next_level (region, type_thrown, &info,
3110 inlinable_call || is_resx) >= RNL_CAUGHT)
3111 break;
3112 /* If we have processed one cleanup, there is no point in
3113 processing any more of them. Each cleanup will have an edge
3114 to the next outer cleanup region, so the flow graph will be
3115 accurate. */
3116 if (region->type == ERT_CLEANUP)
3117 {
3118 enum reachable_code code = RNL_NOT_CAUGHT;
3119 region = find_prev_try (region->outer);
3120 	      /* Continue looking for an outer TRY region until we find one
3121 	         that might catch something.  */
3122 while (region
3123 && (code = reachable_next_level (region, type_thrown, &info,
3124 inlinable_call || is_resx))
3125 == RNL_NOT_CAUGHT)
3126 region = find_prev_try (region->outer);
3127 if (code >= RNL_CAUGHT)
3128 break;
3129 }
3130 if (region)
3131 region = region->outer;
3132 }
3133 }
3134
3135 /* Retrieve a list of labels of exception handlers which can be
3136 reached by a given insn. */
3137
3138 static void
3139 arh_to_landing_pad (struct eh_region_d *region, void *data)
3140 {
3141 rtx *p_handlers = (rtx *) data;
3142 if (! *p_handlers)
3143 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3144 }
3145
3146 static void
3147 arh_to_label (struct eh_region_d *region, void *data)
3148 {
3149 rtx *p_handlers = (rtx *) data;
3150 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3151 }
3152
3153 rtx
3154 reachable_handlers (rtx insn)
3155 {
3156 bool is_resx = false;
3157 rtx handlers = NULL;
3158 int region_number;
3159
3160 if (JUMP_P (insn)
3161 && GET_CODE (PATTERN (insn)) == RESX)
3162 {
3163 region_number = XINT (PATTERN (insn), 0);
3164 is_resx = true;
3165 }
3166 else
3167 {
3168 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3169 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3170 return NULL;
3171 region_number = INTVAL (XEXP (note, 0));
3172 }
3173
3174 foreach_reachable_handler (region_number, is_resx, false,
3175 (crtl->eh.built_landing_pads
3176 ? arh_to_landing_pad
3177 : arh_to_label),
3178 &handlers);
3179
3180 return handlers;
3181 }
3182
3183 /* Determine if the given INSN can throw an exception that is caught
3184 within the function. */
3185
3186 bool
3187 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3188 {
3189 struct eh_region_d *region;
3190 tree type_thrown;
3191
3192 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3193 if (!region)
3194 return false;
3195
3196 type_thrown = NULL_TREE;
3197 if (is_resx)
3198 region = region->outer;
3199 else if (region->type == ERT_THROW)
3200 {
3201 type_thrown = region->u.eh_throw.type;
3202 region = region->outer;
3203 }
3204
3205 /* If this exception is ignored by each and every containing region,
3206 then control passes straight out. The runtime may handle some
3207 regions, which also do not require processing internally. */
3208 for (; region; region = region->outer)
3209 {
3210 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3211 inlinable_call || is_resx);
3212 if (how == RNL_BLOCKED)
3213 return false;
3214 if (how != RNL_NOT_CAUGHT)
3215 return true;
3216 }
3217
3218 return false;
3219 }
3220
3221 bool
3222 can_throw_internal (const_rtx insn)
3223 {
3224 rtx note;
3225
3226 if (! INSN_P (insn))
3227 return false;
3228
3229 if (JUMP_P (insn)
3230 && GET_CODE (PATTERN (insn)) == RESX
3231 && XINT (PATTERN (insn), 0) > 0)
3232 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3233
3234 if (NONJUMP_INSN_P (insn)
3235 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3236 insn = XVECEXP (PATTERN (insn), 0, 0);
3237
3238 /* Every insn that might throw has an EH_REGION note. */
3239 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3240 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3241 return false;
3242
3243 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3244 }
3245
3246 /* Determine if the given INSN can throw an exception that is
3247 visible outside the function. */
3248
3249 bool
3250 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3251 {
3252 struct eh_region_d *region;
3253 tree type_thrown;
3254
3255 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3256 if (!region)
3257 return true;
3258
3259 type_thrown = NULL_TREE;
3260 if (is_resx)
3261 region = region->outer;
3262 else if (region->type == ERT_THROW)
3263 {
3264 type_thrown = region->u.eh_throw.type;
3265 region = region->outer;
3266 }
3267
3268 /* If the exception is caught or blocked by any containing region,
3269 then it is not seen by any calling function. */
3270 for (; region ; region = region->outer)
3271 if (reachable_next_level (region, type_thrown, NULL,
3272 inlinable_call || is_resx) >= RNL_CAUGHT)
3273 return false;
3274
3275 return true;
3276 }
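
/* For illustration: a call inside try { ... } catch (...) has its
   exception caught locally, so can_throw_internal is true and
   can_throw_external is false for it; for a call whose exception no
   containing region catches or blocks, the reverse holds.  */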
3277
3278 bool
3279 can_throw_external (const_rtx insn)
3280 {
3281 rtx note;
3282
3283 if (! INSN_P (insn))
3284 return false;
3285
3286 if (JUMP_P (insn)
3287 && GET_CODE (PATTERN (insn)) == RESX
3288 && XINT (PATTERN (insn), 0) > 0)
3289 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3290
3291 if (NONJUMP_INSN_P (insn)
3292 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3293 {
3294 rtx seq = PATTERN (insn);
3295 int i, n = XVECLEN (seq, 0);
3296
3297 for (i = 0; i < n; i++)
3298 if (can_throw_external (XVECEXP (seq, 0, i)))
3299 return true;
3300
3301 return false;
3302 }
3303
3304 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3305 if (!note)
3306 {
3307 /* Calls (and trapping insns) without notes are outside any
3308 exception handling region in this function. We have to
3309 assume it might throw. Given that the front end and middle
3310 ends mark known NOTHROW functions, this isn't so wildly
3311 inaccurate. */
3312 return (CALL_P (insn)
3313 || (flag_non_call_exceptions
3314 && may_trap_p (PATTERN (insn))));
3315 }
3316 if (INTVAL (XEXP (note, 0)) <= 0)
3317 return false;
3318
3319 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3320 }
3321
3322 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3323
3324 unsigned int
3325 set_nothrow_function_flags (void)
3326 {
3327 rtx insn;
3328
3329 crtl->nothrow = 1;
3330
3331 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3332 something that can throw an exception. We specifically exempt
3333 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3334 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3335 is optimistic. */
3336
3337 crtl->all_throwers_are_sibcalls = 1;
3338
3339 /* If we don't know that this implementation of the function will
3340 actually be used, then we must not set TREE_NOTHROW, since
3341 callers must not assume that this function does not throw. */
3342 if (TREE_NOTHROW (current_function_decl))
3343 return 0;
3344
3345 if (! flag_exceptions)
3346 return 0;
3347
3348 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3349 if (can_throw_external (insn))
3350 {
3351 crtl->nothrow = 0;
3352
3353 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3354 {
3355 crtl->all_throwers_are_sibcalls = 0;
3356 return 0;
3357 }
3358 }
3359
3360 for (insn = crtl->epilogue_delay_list; insn;
3361 insn = XEXP (insn, 1))
3362 if (can_throw_external (insn))
3363 {
3364 crtl->nothrow = 0;
3365
3366 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3367 {
3368 crtl->all_throwers_are_sibcalls = 0;
3369 return 0;
3370 }
3371 }
3372 if (crtl->nothrow
3373 && (cgraph_function_body_availability (cgraph_node
3374 (current_function_decl))
3375 >= AVAIL_AVAILABLE))
3376 {
3377 struct cgraph_node *node = cgraph_node (current_function_decl);
3378 struct cgraph_edge *e;
3379 for (e = node->callers; e; e = e->next_caller)
3380 e->can_throw_external = false;
3381 TREE_NOTHROW (current_function_decl) = 1;
3382
3383 if (dump_file)
3384 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3385 current_function_name ());
3386 }
3387 return 0;
3388 }
3389
3390 struct rtl_opt_pass pass_set_nothrow_function_flags =
3391 {
3392 {
3393 RTL_PASS,
3394 "nothrow", /* name */
3395 NULL, /* gate */
3396 set_nothrow_function_flags, /* execute */
3397 NULL, /* sub */
3398 NULL, /* next */
3399 0, /* static_pass_number */
3400 TV_NONE, /* tv_id */
3401 0, /* properties_required */
3402 0, /* properties_provided */
3403 0, /* properties_destroyed */
3404 0, /* todo_flags_start */
3405 TODO_dump_func, /* todo_flags_finish */
3406 }
3407 };
3408
3409 \f
3410 /* Various hooks for unwind library. */
3411
3412 /* Do any necessary initialization to access arbitrary stack frames.
3413 On the SPARC, this means flushing the register windows. */
3414
3415 void
3416 expand_builtin_unwind_init (void)
3417 {
3418 /* Set this so all the registers get saved in our frame; we need to be
3419 able to copy the saved values for any registers from frames we unwind. */
3420 crtl->saves_all_registers = 1;
3421
3422 #ifdef SETUP_FRAME_ADDRESSES
3423 SETUP_FRAME_ADDRESSES ();
3424 #endif
3425 }
3426
3427 rtx
3428 expand_builtin_eh_return_data_regno (tree exp)
3429 {
3430 tree which = CALL_EXPR_ARG (exp, 0);
3431 unsigned HOST_WIDE_INT iwhich;
3432
3433 if (TREE_CODE (which) != INTEGER_CST)
3434 {
3435 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3436 return constm1_rtx;
3437 }
3438
3439 iwhich = tree_low_cst (which, 1);
3440 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3441 if (iwhich == INVALID_REGNUM)
3442 return constm1_rtx;
3443
3444 #ifdef DWARF_FRAME_REGNUM
3445 iwhich = DWARF_FRAME_REGNUM (iwhich);
3446 #else
3447 iwhich = DBX_REGISTER_NUMBER (iwhich);
3448 #endif
3449
3450 return GEN_INT (iwhich);
3451 }
3452
3453 /* Given a value extracted from the return address register or stack slot,
3454 return the actual address encoded in that value. */
3455
3456 rtx
3457 expand_builtin_extract_return_addr (tree addr_tree)
3458 {
3459 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3460
3461 if (GET_MODE (addr) != Pmode
3462 && GET_MODE (addr) != VOIDmode)
3463 {
3464 #ifdef POINTERS_EXTEND_UNSIGNED
3465 addr = convert_memory_address (Pmode, addr);
3466 #else
3467 addr = convert_to_mode (Pmode, addr, 0);
3468 #endif
3469 }
3470
3471 /* First mask out any unwanted bits. */
3472 #ifdef MASK_RETURN_ADDR
3473 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3474 #endif
3475
3476 /* Then adjust to find the real return address. */
3477 #if defined (RETURN_ADDR_OFFSET)
3478 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3479 #endif
3480
3481 return addr;
3482 }
3483
3484 /* Given an actual address in addr_tree, do any necessary encoding
3485 and return the value to be stored in the return address register or
3486 stack slot so the epilogue will return to that address. */
3487
3488 rtx
3489 expand_builtin_frob_return_addr (tree addr_tree)
3490 {
3491 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3492
3493 addr = convert_memory_address (Pmode, addr);
3494
3495 #ifdef RETURN_ADDR_OFFSET
3496 addr = force_reg (Pmode, addr);
3497 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3498 #endif
3499
3500 return addr;
3501 }
3502
3503 /* Set up the epilogue with the magic bits we'll need to return to the
3504 exception handler. */
3505
3506 void
3507 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3508 tree handler_tree)
3509 {
3510 rtx tmp;
3511
3512 #ifdef EH_RETURN_STACKADJ_RTX
3513 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3514 VOIDmode, EXPAND_NORMAL);
3515 tmp = convert_memory_address (Pmode, tmp);
3516 if (!crtl->eh.ehr_stackadj)
3517 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3518 else if (tmp != crtl->eh.ehr_stackadj)
3519 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3520 #endif
3521
3522 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3523 VOIDmode, EXPAND_NORMAL);
3524 tmp = convert_memory_address (Pmode, tmp);
3525 if (!crtl->eh.ehr_handler)
3526 crtl->eh.ehr_handler = copy_to_reg (tmp);
3527 else if (tmp != crtl->eh.ehr_handler)
3528 emit_move_insn (crtl->eh.ehr_handler, tmp);
3529
3530 if (!crtl->eh.ehr_label)
3531 crtl->eh.ehr_label = gen_label_rtx ();
3532 emit_jump (crtl->eh.ehr_label);
3533 }
3534
3535 void
3536 expand_eh_return (void)
3537 {
3538 rtx around_label;
3539
3540 if (! crtl->eh.ehr_label)
3541 return;
3542
3543 crtl->calls_eh_return = 1;
3544
3545 #ifdef EH_RETURN_STACKADJ_RTX
3546 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3547 #endif
3548
3549 around_label = gen_label_rtx ();
3550 emit_jump (around_label);
3551
3552 emit_label (crtl->eh.ehr_label);
3553 clobber_return_register ();
3554
3555 #ifdef EH_RETURN_STACKADJ_RTX
3556 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3557 #endif
3558
3559 #ifdef HAVE_eh_return
3560 if (HAVE_eh_return)
3561 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3562 else
3563 #endif
3564 {
3565 #ifdef EH_RETURN_HANDLER_RTX
3566 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3567 #else
3568 error ("__builtin_eh_return not supported on this target");
3569 #endif
3570 }
3571
3572 emit_label (around_label);
3573 }
3574
3575 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3576 POINTERS_EXTEND_UNSIGNED and return it. */
3577
3578 rtx
3579 expand_builtin_extend_pointer (tree addr_tree)
3580 {
3581 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3582 int extend;
3583
3584 #ifdef POINTERS_EXTEND_UNSIGNED
3585 extend = POINTERS_EXTEND_UNSIGNED;
3586 #else
3587 /* The previous EH code did an unsigned extend by default, so we do this also
3588 for consistency. */
3589 extend = 1;
3590 #endif
3591
3592 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3593 }
3594 \f
3595 /* In the following functions, we represent entries in the action table
3596 as 1-based indices. Special cases are:
3597
3598 0: null action record, non-null landing pad; implies cleanups
3599 -1: null action record, null landing pad; implies no action
3600 -2: no call-site entry; implies must_not_throw
3601 -3: we have yet to process outer regions
3602
3603 Further, no special cases apply to the "next" field of the record.
3604 For next, 0 means end of list. */
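
/* For illustration: an insn covered only by cleanup regions gets action
   0 (cleanups run, but nothing is caught locally); an insn covered by no
   region gets -1 (no action record and no landing pad); and an insn
   inside a bare must-not-throw region gets -2 (no call-site entry,
   although an lsda is still required).  */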
3605
3606 struct action_record
3607 {
3608 int offset;
3609 int filter;
3610 int next;
3611 };
3612
3613 static int
3614 action_record_eq (const void *pentry, const void *pdata)
3615 {
3616 const struct action_record *entry = (const struct action_record *) pentry;
3617 const struct action_record *data = (const struct action_record *) pdata;
3618 return entry->filter == data->filter && entry->next == data->next;
3619 }
3620
3621 static hashval_t
3622 action_record_hash (const void *pentry)
3623 {
3624 const struct action_record *entry = (const struct action_record *) pentry;
3625 return entry->next * 1009 + entry->filter;
3626 }
3627
3628 static int
3629 add_action_record (htab_t ar_hash, int filter, int next)
3630 {
3631 struct action_record **slot, *new_ar, tmp;
3632
3633 tmp.filter = filter;
3634 tmp.next = next;
3635 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3636
3637 if ((new_ar = *slot) == NULL)
3638 {
3639 new_ar = XNEW (struct action_record);
3640 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3641 new_ar->filter = filter;
3642 new_ar->next = next;
3643 *slot = new_ar;
3644
3645 /* The filter value goes in untouched. The link to the next
3646 record is a "self-relative" byte offset, or zero to indicate
3647 	         that there is no next record.  So convert the absolute 1-based
3648 indices we've been carrying around into a displacement. */
3649
3650 push_sleb128 (&crtl->eh.action_record_data, filter);
3651 if (next)
3652 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3653 push_sleb128 (&crtl->eh.action_record_data, next);
3654 }
3655
3656 return new_ar->offset;
3657 }
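
/* For illustration: the first record added (say filter 1, next 0) gets
   offset 1 and occupies bytes 1 and 2 of action_record_data; a second
   record chaining to it (filter 2, next 1) gets offset 3, and its next
   field, pushed at byte index 4, is stored as the sleb128 displacement
   1 - 4 = -3.  The runtime recovers the chained record by adding the
   displacement to the position of the next field itself.  */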
3658
3659 static int
3660 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3661 {
3662 struct eh_region_d *c;
3663 int next;
3664
3665 /* If we've reached the top of the region chain, then we have
3666 no actions, and require no landing pad. */
3667 if (region == NULL)
3668 return -1;
3669
3670 switch (region->type)
3671 {
3672 case ERT_CLEANUP:
3673 /* A cleanup adds a zero filter to the beginning of the chain, but
3674 there are special cases to look out for. If there are *only*
3675 cleanups along a path, then it compresses to a zero action.
3676 Further, if there are multiple cleanups along a path, we only
3677 need to represent one of them, as that is enough to trigger
3678 entry to the landing pad at runtime. */
3679 next = collect_one_action_chain (ar_hash, region->outer);
3680 if (next <= 0)
3681 return 0;
3682 for (c = region->outer; c ; c = c->outer)
3683 if (c->type == ERT_CLEANUP)
3684 return next;
3685 return add_action_record (ar_hash, 0, next);
3686
3687 case ERT_TRY:
3688 /* Process the associated catch regions in reverse order.
3689 If there's a catch-all handler, then we don't need to
3690 search outer regions. Use a magic -3 value to record
3691 that we haven't done the outer search. */
3692 next = -3;
3693 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3694 {
3695 if (c->u.eh_catch.type_list == NULL)
3696 {
3697 /* Retrieve the filter from the head of the filter list
3698 where we have stored it (see assign_filter_values). */
3699 int filter
3700 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3701
3702 next = add_action_record (ar_hash, filter, 0);
3703 }
3704 else
3705 {
3706 /* Once the outer search is done, trigger an action record for
3707 each filter we have. */
3708 tree flt_node;
3709
3710 if (next == -3)
3711 {
3712 next = collect_one_action_chain (ar_hash, region->outer);
3713
3714 /* If there is no next action, terminate the chain. */
3715 if (next == -1)
3716 next = 0;
3717 /* If all outer actions are cleanups or must_not_throw,
3718 we'll have no action record for it, since we had wanted
3719 to encode these states in the call-site record directly.
3720 Add a cleanup action to the chain to catch these. */
3721 else if (next <= 0)
3722 next = add_action_record (ar_hash, 0, 0);
3723 }
3724
3725 flt_node = c->u.eh_catch.filter_list;
3726 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3727 {
3728 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3729 next = add_action_record (ar_hash, filter, next);
3730 }
3731 }
3732 }
3733 return next;
3734
3735 case ERT_ALLOWED_EXCEPTIONS:
3736 /* An exception specification adds its filter to the
3737 beginning of the chain. */
3738 next = collect_one_action_chain (ar_hash, region->outer);
3739
3740 /* If there is no next action, terminate the chain. */
3741 if (next == -1)
3742 next = 0;
3743 /* If all outer actions are cleanups or must_not_throw,
3744 we'll have no action record for it, since we had wanted
3745 to encode these states in the call-site record directly.
3746 Add a cleanup action to the chain to catch these. */
3747 else if (next <= 0)
3748 next = add_action_record (ar_hash, 0, 0);
3749
3750 return add_action_record (ar_hash, region->u.allowed.filter, next);
3751
3752 case ERT_MUST_NOT_THROW:
3753 /* A must-not-throw region with no inner handlers or cleanups
3754 requires no call-site entry. Note that this differs from
3755 the no handler or cleanup case in that we do require an lsda
3756 to be generated. Return a magic -2 value to record this. */
3757 return -2;
3758
3759 case ERT_CATCH:
3760 case ERT_THROW:
3761 /* CATCH regions are handled in TRY above. THROW regions are
3762 for optimization information only and produce no output. */
3763 return collect_one_action_chain (ar_hash, region->outer);
3764
3765 default:
3766 gcc_unreachable ();
3767 }
3768 }
3769
3770 static int
3771 add_call_site (rtx landing_pad, int action)
3772 {
3773 call_site_record record;
3774
3775 record = GGC_NEW (struct call_site_record_d);
3776 record->landing_pad = landing_pad;
3777 record->action = action;
3778
3779 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3780
3781 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3782 }
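
/* For illustration: the record just pushed is the newest of the
   VEC_length records, so successive calls return call_site_base,
   call_site_base + 1, and so on; the sjlj path sets call_site_base to 1
   in sjlj_assign_call_site_values, which keeps index 0 free for
   must-not-throw.  */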
3783
3784 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3785 The new note numbers will not refer to region numbers, but
3786 instead to call site entries. */
3787
3788 unsigned int
3789 convert_to_eh_region_ranges (void)
3790 {
3791 rtx insn, iter, note;
3792 htab_t ar_hash;
3793 int last_action = -3;
3794 rtx last_action_insn = NULL_RTX;
3795 rtx last_landing_pad = NULL_RTX;
3796 rtx first_no_action_insn = NULL_RTX;
3797 int call_site = 0;
3798
3799 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3800 return 0;
3801
3802 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3803
3804 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3805
3806 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3807 if (INSN_P (iter))
3808 {
3809 struct eh_region_d *region;
3810 int this_action;
3811 rtx this_landing_pad;
3812
3813 insn = iter;
3814 if (NONJUMP_INSN_P (insn)
3815 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3816 insn = XVECEXP (PATTERN (insn), 0, 0);
3817
3818 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3819 if (!note)
3820 {
3821 if (! (CALL_P (insn)
3822 || (flag_non_call_exceptions
3823 && may_trap_p (PATTERN (insn)))))
3824 continue;
3825 this_action = -1;
3826 region = NULL;
3827 }
3828 else
3829 {
3830 if (INTVAL (XEXP (note, 0)) <= 0)
3831 continue;
3832 region = VEC_index (eh_region, cfun->eh->region_array,
INTVAL (XEXP (note, 0)));
3833 this_action = collect_one_action_chain (ar_hash, region);
3834 }
3835
3836 /* The existence of catch handlers or must-not-throw regions
3837 implies that an lsda is needed (even if empty). */
3838 if (this_action != -1)
3839 crtl->uses_eh_lsda = 1;
3840
3841 /* Delay creation of region notes for no-action regions
3842 until we're sure that an lsda will be required. */
3843 else if (last_action == -3)
3844 {
3845 first_no_action_insn = iter;
3846 last_action = -1;
3847 }
3848
3849 /* Cleanups and handlers may share action chains but not
3850 landing pads. Collect the landing pad for this region. */
3851 if (this_action >= 0)
3852 {
3853 struct eh_region_d *o;
3854 for (o = region; ! o->landing_pad ; o = o->outer)
3855 continue;
3856 this_landing_pad = o->landing_pad;
3857 }
3858 else
3859 this_landing_pad = NULL_RTX;
3860
3861 /* Differing actions or landing pads imply a change in call-site
3862 info, which implies some EH_REGION note should be emitted. */
3863 if (last_action != this_action
3864 || last_landing_pad != this_landing_pad)
3865 {
3866 /* If we'd not seen a previous action (-3) or the previous
3867 action was must-not-throw (-2), then we do not need an
3868 end note. */
3869 if (last_action >= -1)
3870 {
3871 /* If we delayed the creation of the begin, do it now. */
3872 if (first_no_action_insn)
3873 {
3874 call_site = add_call_site (NULL_RTX, 0);
3875 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3876 first_no_action_insn);
3877 NOTE_EH_HANDLER (note) = call_site;
3878 first_no_action_insn = NULL_RTX;
3879 }
3880
3881 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3882 last_action_insn);
3883 NOTE_EH_HANDLER (note) = call_site;
3884 }
3885
3886 /* If the new action is must-not-throw, then no region notes
3887 are created. */
3888 if (this_action >= -1)
3889 {
3890 call_site = add_call_site (this_landing_pad,
3891 this_action < 0 ? 0 : this_action);
3892 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3893 NOTE_EH_HANDLER (note) = call_site;
3894 }
3895
3896 last_action = this_action;
3897 last_landing_pad = this_landing_pad;
3898 }
3899 last_action_insn = iter;
3900 }
3901
3902 if (last_action >= -1 && ! first_no_action_insn)
3903 {
3904 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3905 NOTE_EH_HANDLER (note) = call_site;
3906 }
3907
3908 htab_delete (ar_hash);
3909 return 0;
3910 }
3911
3912 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3913 {
3914 {
3915 RTL_PASS,
3916 "eh_ranges", /* name */
3917 NULL, /* gate */
3918 convert_to_eh_region_ranges, /* execute */
3919 NULL, /* sub */
3920 NULL, /* next */
3921 0, /* static_pass_number */
3922 TV_NONE, /* tv_id */
3923 0, /* properties_required */
3924 0, /* properties_provided */
3925 0, /* properties_destroyed */
3926 0, /* todo_flags_start */
3927 TODO_dump_func, /* todo_flags_finish */
3928 }
3929 };
3930
3931 \f
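/* Append VALUE to *DATA_AREA as an unsigned LEB128 number: seven
bits per byte, least significant group first, with the high bit set
on every byte except the last. For example, 624485 (0x98765) is
encoded as the three bytes 0xE5 0x8E 0x26. */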
3932 static void
3933 push_uleb128 (varray_type *data_area, unsigned int value)
3934 {
3935 do
3936 {
3937 unsigned char byte = value & 0x7f;
3938 value >>= 7;
3939 if (value)
3940 byte |= 0x80;
3941 VARRAY_PUSH_UCHAR (*data_area, byte);
3942 }
3943 while (value);
3944 }
3945
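/* Append VALUE to *DATA_AREA as a signed LEB128 number. The scheme
is as above, but encoding stops once the remaining bits are all
copies of the sign bit; e.g. -2 is the single byte 0x7E. Note the
reliance on >> arithmetic-shifting negative values. */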
3946 static void
3947 push_sleb128 (varray_type *data_area, int value)
3948 {
3949 unsigned char byte;
3950 int more;
3951
3952 do
3953 {
3954 byte = value & 0x7f;
3955 value >>= 7;
3956 more = ! ((value == 0 && (byte & 0x40) == 0)
3957 || (value == -1 && (byte & 0x40) != 0));
3958 if (more)
3959 byte |= 0x80;
3960 VARRAY_PUSH_UCHAR (*data_area, byte);
3961 }
3962 while (more);
3963 }
3964
3965 \f
3966 #ifndef HAVE_AS_LEB128
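/* Return the size in bytes of the dw2 call-site table: three 4-byte
fields (region start, region length, landing pad) plus a uleb128
action offset per record. Only needed when the assembler cannot
resolve .uleb128 deltas for us. */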
3967 static int
3968 dw2_size_of_call_site_table (void)
3969 {
3970 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3971 int size = n * (4 + 4 + 4);
3972 int i;
3973
3974 for (i = 0; i < n; ++i)
3975 {
3976 struct call_site_record_d *cs =
3977 VEC_index (call_site_record, crtl->eh.call_site_record, i);
3978 size += size_of_uleb128 (cs->action);
3979 }
3980
3981 return size;
3982 }
3983
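/* Likewise for the sjlj call-site table, where each record is just
a uleb128 landing pad value and a uleb128 action offset. */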
3984 static int
3985 sjlj_size_of_call_site_table (void)
3986 {
3987 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3988 int size = 0;
3989 int i;
3990
3991 for (i = 0; i < n; ++i)
3992 {
3993 struct call_site_record_d *cs =
3994 VEC_index (call_site_record, crtl->eh.call_site_record, i);
3995 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3996 size += size_of_uleb128 (cs->action);
3997 }
3998
3999 return size;
4000 }
4001 #endif
4002
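/* Emit the dw2 call-site table: per record, the region start and
length and the landing pad, all relative to the function start,
followed by the uleb128 action offset. The offsets are emitted as
uleb128 deltas when the assembler supports them, 4-byte data
otherwise. */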
4003 static void
4004 dw2_output_call_site_table (void)
4005 {
4006 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4007 int i;
4008
4009 for (i = 0; i < n; ++i)
4010 {
4011 struct call_site_record_d *cs =
4012 VEC_index (call_site_record, crtl->eh.call_site_record, i);
4013 char reg_start_lab[32];
4014 char reg_end_lab[32];
4015 char landing_pad_lab[32];
4016
4017 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
4018 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
4019
4020 if (cs->landing_pad)
4021 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
4022 CODE_LABEL_NUMBER (cs->landing_pad));
4023
4024 /* ??? Perhaps use insn length scaling if the assembler supports
4025 generic arithmetic. */
4026 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
4027 data4 if the function is small enough. */
4028 #ifdef HAVE_AS_LEB128
4029 dw2_asm_output_delta_uleb128 (reg_start_lab,
4030 current_function_func_begin_label,
4031 "region %d start", i);
4032 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
4033 "length");
4034 if (cs->landing_pad)
4035 dw2_asm_output_delta_uleb128 (landing_pad_lab,
4036 current_function_func_begin_label,
4037 "landing pad");
4038 else
4039 dw2_asm_output_data_uleb128 (0, "landing pad");
4040 #else
4041 dw2_asm_output_delta (4, reg_start_lab,
4042 current_function_func_begin_label,
4043 "region %d start", i);
4044 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
4045 if (cs->landing_pad)
4046 dw2_asm_output_delta (4, landing_pad_lab,
4047 current_function_func_begin_label,
4048 "landing pad");
4049 else
4050 dw2_asm_output_data (4, 0, "landing pad");
4051 #endif
4052 dw2_asm_output_data_uleb128 (cs->action, "action");
4053 }
4054
4055 call_site_base += n;
4056 }
4057
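/* Emit the sjlj call-site table: a uleb128 landing pad value and a
uleb128 action offset per record. */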
4058 static void
4059 sjlj_output_call_site_table (void)
4060 {
4061 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4062 int i;
4063
4064 for (i = 0; i < n; ++i)
4065 {
4066 struct call_site_record_d *cs =
4067 VEC_index (call_site_record, crtl->eh.call_site_record, i);
4068
4069 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4070 "region %d landing pad", i);
4071 dw2_asm_output_data_uleb128 (cs->action, "action");
4072 }
4073
4074 call_site_base += n;
4075 }
4076
4077 #ifndef TARGET_UNWIND_INFO
4078 /* Switch to the section that should be used for exception tables. */
4079
4080 static void
4081 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4082 {
4083 section *s;
4084
4085 if (exception_section)
4086 s = exception_section;
4087 else
4088 {
4089 /* Compute the section and cache it into exception_section,
4090 unless it depends on the function name. */
4091 if (targetm.have_named_sections)
4092 {
4093 int flags;
4094
4095 if (EH_TABLES_CAN_BE_READ_ONLY)
4096 {
4097 int tt_format =
4098 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4099 flags = ((! flag_pic
4100 || ((tt_format & 0x70) != DW_EH_PE_absptr
4101 && (tt_format & 0x70) != DW_EH_PE_aligned))
4102 ? 0 : SECTION_WRITE);
4103 }
4104 else
4105 flags = SECTION_WRITE;
4106
4107 #ifdef HAVE_LD_EH_GC_SECTIONS
4108 if (flag_function_sections)
4109 {
4110 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4111 sprintf (section_name, ".gcc_except_table.%s", fnname);
4112 s = get_section (section_name, flags, NULL);
4113 free (section_name);
4114 }
4115 else
4116 #endif
4117 exception_section
4118 = s = get_section (".gcc_except_table", flags, NULL);
4119 }
4120 else
4121 exception_section
4122 = s = flag_pic ? data_section : readonly_data_section;
4123 }
4124
4125 switch_to_section (s);
4126 }
4127 #endif
4128
4129
4130 /* Output a reference from an exception table to the type_info object TYPE.
4131 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4132 the value. */
4133
4134 static void
4135 output_ttype (tree type, int tt_format, int tt_format_size)
4136 {
4137 rtx value;
4138 bool is_public = true;
4139
4140 if (type == NULL_TREE)
4141 value = const0_rtx;
4142 else
4143 {
4144 struct varpool_node *node;
4145
4146 type = lookup_type_for_runtime (type);
4147 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4148
4149 /* Let cgraph know that the rtti decl is used. Not all of the
4150 paths below go through assemble_integer, which would take
4151 care of this for us. */
4152 STRIP_NOPS (type);
4153 if (TREE_CODE (type) == ADDR_EXPR)
4154 {
4155 type = TREE_OPERAND (type, 0);
4156 if (TREE_CODE (type) == VAR_DECL)
4157 {
4158 node = varpool_node (type);
4159 if (node)
4160 varpool_mark_needed_node (node);
4161 is_public = TREE_PUBLIC (type);
4162 }
4163 }
4164 else
4165 gcc_assert (TREE_CODE (type) == INTEGER_CST);
4166 }
4167
4168 /* Allow the target to override the type table entry format. */
4169 if (targetm.asm_out.ttype (value))
4170 return;
4171
4172 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4173 assemble_integer (value, tt_format_size,
4174 tt_format_size * BITS_PER_UNIT, 1);
4175 else
4176 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
4177 }
4178
4179 void
4180 output_function_exception_table (const char * ARG_UNUSED (fnname))
4181 {
4182 int tt_format, cs_format, lp_format, i, n;
4183 #ifdef HAVE_AS_LEB128
4184 char ttype_label[32];
4185 char cs_after_size_label[32];
4186 char cs_end_label[32];
4187 #else
4188 int call_site_len;
4189 #endif
4190 int have_tt_data;
4191 int tt_format_size = 0;
4192
4193 /* Not every function needs an exception table. */
4194 if (! crtl->uses_eh_lsda)
4195 return;
4196
4197 if (eh_personality_libfunc)
4198 assemble_external_libcall (eh_personality_libfunc);
4199
4200 #ifdef TARGET_UNWIND_INFO
4201 /* TODO: Move this into target file. */
4202 fputs ("\t.personality\t", asm_out_file);
4203 output_addr_const (asm_out_file, eh_personality_libfunc);
4204 fputs ("\n\t.handlerdata\n", asm_out_file);
4205 /* Note that varasm still thinks we're in the function's code section.
4206 The ".endp" directive that will immediately follow will take us back. */
4207 #else
4208 switch_to_exception_section (fnname);
4209 #endif
4210
4211 /* If the target wants a label to begin the table, emit it here. */
4212 targetm.asm_out.except_table_label (asm_out_file);
4213
4214 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4215 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4216
4217 /* Indicate the format of the @TType entries. */
4218 if (! have_tt_data)
4219 tt_format = DW_EH_PE_omit;
4220 else
4221 {
4222 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4223 #ifdef HAVE_AS_LEB128
4224 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
4225 current_function_funcdef_no);
4226 #endif
4227 tt_format_size = size_of_encoded_value (tt_format);
4228
4229 assemble_align (tt_format_size * BITS_PER_UNIT);
4230 }
4231
4232 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
4233 current_function_funcdef_no);
4234
4235 /* The LSDA header. */
4236
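/* For orientation, a sketch of the LSDA layout emitted below;
bracketed fields may be omitted, and exact encodings depend on the
target and on assembler support for .uleb128:

   @LPStart format byte           (always DW_EH_PE_omit here)
   @TType format byte
   [@TType base offset]           (uleb128, only with @TType data)
   call-site format byte
   call-site table length         (uleb128)
   call-site table
   action record table
   [alignment padding]
   [@TType table]                 (emitted in reverse order)
   [exception specification table]  */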
4237 /* Indicate the format of the landing pad start pointer. An omitted
4238 field implies @LPStart == @Start. */
4239 /* Currently we always put @LPStart == @Start. This field would
4240 be most useful in moving the landing pads completely out of
4241 line to another section, but it could also be used to minimize
4242 the size of uleb128 landing pad offsets. */
4243 lp_format = DW_EH_PE_omit;
4244 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4245 eh_data_format_name (lp_format));
4246
4247 /* @LPStart pointer would go here. */
4248
4249 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4250 eh_data_format_name (tt_format));
4251
4252 #ifndef HAVE_AS_LEB128
4253 if (USING_SJLJ_EXCEPTIONS)
4254 call_site_len = sjlj_size_of_call_site_table ();
4255 else
4256 call_site_len = dw2_size_of_call_site_table ();
4257 #endif
4258
4259 /* A pc-relative 4-byte displacement to the @TType data. */
4260 if (have_tt_data)
4261 {
4262 #ifdef HAVE_AS_LEB128
4263 char ttype_after_disp_label[32];
4264 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4265 current_function_funcdef_no);
4266 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4267 "@TType base offset");
4268 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4269 #else
4270 /* Ugh. Alignment complicates things: the size of the uleb128
encoding of the displacement changes the padding needed to align
the @TType table, which in turn changes the displacement.
Iterate until we reach a fixed point. */
4271 unsigned int before_disp, after_disp, last_disp, disp;
4272
4273 before_disp = 1 + 1;
4274 after_disp = (1 + size_of_uleb128 (call_site_len)
4275 + call_site_len
4276 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4277 + (VEC_length (tree, crtl->eh.ttype_data)
4278 * tt_format_size));
4279
4280 disp = after_disp;
4281 do
4282 {
4283 unsigned int disp_size, pad;
4284
4285 last_disp = disp;
4286 disp_size = size_of_uleb128 (disp);
4287 pad = before_disp + disp_size + after_disp;
4288 if (pad % tt_format_size)
4289 pad = tt_format_size - (pad % tt_format_size);
4290 else
4291 pad = 0;
4292 disp = after_disp + pad;
4293 }
4294 while (disp != last_disp);
4295
4296 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4297 #endif
4298 }
4299
4300 /* Indicate the format of the call-site offsets. */
4301 #ifdef HAVE_AS_LEB128
4302 cs_format = DW_EH_PE_uleb128;
4303 #else
4304 cs_format = DW_EH_PE_udata4;
4305 #endif
4306 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4307 eh_data_format_name (cs_format));
4308
4309 #ifdef HAVE_AS_LEB128
4310 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4311 current_function_funcdef_no);
4312 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4313 current_function_funcdef_no);
4314 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4315 "Call-site table length");
4316 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4317 if (USING_SJLJ_EXCEPTIONS)
4318 sjlj_output_call_site_table ();
4319 else
4320 dw2_output_call_site_table ();
4321 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4322 #else
4323 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
4324 if (USING_SJLJ_EXCEPTIONS)
4325 sjlj_output_call_site_table ();
4326 else
4327 dw2_output_call_site_table ();
4328 #endif
4329
4330 /* ??? Decode and interpret the data for flag_debug_asm. */
4331 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4332 for (i = 0; i < n; ++i)
4333 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4334 (i ? NULL : "Action record table"));
4335
4336 if (have_tt_data)
4337 assemble_align (tt_format_size * BITS_PER_UNIT);
4338
4339 i = VEC_length (tree, crtl->eh.ttype_data);
4340 while (i-- > 0)
4341 {
4342 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4343 output_ttype (type, tt_format, tt_format_size);
4344 }
4345
4346 #ifdef HAVE_AS_LEB128
4347 if (have_tt_data)
4348 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4349 #endif
4350
4351 /* ??? Decode and interpret the data for flag_debug_asm. */
4352 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4353 for (i = 0; i < n; ++i)
4354 {
4355 if (targetm.arm_eabi_unwinder)
4356 {
4357 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4358 output_ttype (type, tt_format, tt_format_size);
4359 }
4360 else
4361 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4362 (i ? NULL : "Exception specification table"));
4363 }
4364
4365 switch_to_section (current_function_section ());
4366 }
4367
4368 void
4369 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4370 {
4371 fun->eh->throw_stmt_table = table;
4372 }
4373
4374 htab_t
4375 get_eh_throw_stmt_table (struct function *fun)
4376 {
4377 return fun->eh->throw_stmt_table;
4378 }
4379
4380 /* Dump EH information to OUT. */
4381
4382 void
4383 dump_eh_tree (FILE * out, struct function *fun)
4384 {
4385 struct eh_region_d *i;
4386 int depth = 0;
4387 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4388 "allowed_exceptions", "must_not_throw",
4389 "throw"
4390 };
4391
4392 i = fun->eh->region_tree;
4393 if (!i)
4394 return;
4395
4396 fprintf (out, "Eh tree:\n");
4397 while (1)
4398 {
4399 fprintf (out, " %*s %i %s", depth * 2, "",
4400 i->region_number, type_name[(int) i->type]);
4401 if (i->tree_label)
4402 {
4403 fprintf (out, " tree_label:");
4404 print_generic_expr (out, i->tree_label, 0);
4405 }
4406 if (i->label)
4407 fprintf (out, " label:%i", INSN_UID (i->label));
4408 if (i->landing_pad)
4409 {
4410 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
4411 if (NOTE_P (i->landing_pad))
4412 fprintf (out, " (deleted)");
4413 }
4414 if (i->post_landing_pad)
4415 {
4416 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
4417 if (NOTE_P (i->post_landing_pad))
4418 fprintf (out, " (deleted)");
4419 }
4420 if (i->resume)
4421 {
4422 fprintf (out, " resume:%i", INSN_UID (i->resume));
4423 if (NOTE_P (i->resume))
4424 fprintf (out, " (deleted)");
4425 }
4426 if (i->may_contain_throw)
4427 fprintf (out, " may_contain_throw");
4428 switch (i->type)
4429 {
4430 case ERT_CLEANUP:
4431 break;
4432
4433 case ERT_TRY:
4434 {
4435 struct eh_region_d *c;
4436 fprintf (out, " catch regions:");
4437 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4438 fprintf (out, " %i", c->region_number);
4439 }
4440 break;
4441
4442 case ERT_CATCH:
4443 if (i->u.eh_catch.prev_catch)
4444 fprintf (out, " prev: %i",
4445 i->u.eh_catch.prev_catch->region_number);
4446 if (i->u.eh_catch.next_catch)
4447 fprintf (out, " next %i",
4448 i->u.eh_catch.next_catch->region_number);
4449 fprintf (out, " type:");
4450 print_generic_expr (out, i->u.eh_catch.type_list, 0);
4451 break;
4452
4453 case ERT_ALLOWED_EXCEPTIONS:
4454 fprintf (out, " filter :%i types:", i->u.allowed.filter);
4455 print_generic_expr (out, i->u.allowed.type_list, 0);
4456 break;
4457
4458 case ERT_THROW:
4459 fprintf (out, " type:");
4460 print_generic_expr (out, i->u.eh_throw.type, 0);
4461 break;
4462
4463 case ERT_MUST_NOT_THROW:
4464 break;
4465
4466 case ERT_UNKNOWN:
4467 break;
4468 }
4469 if (i->aka)
4470 {
4471 fprintf (out, " also known as:");
4472 dump_bitmap (out, i->aka);
4473 }
4474 else
4475 fprintf (out, "\n");
4476 /* If there are sub-regions, process them. */
4477 if (i->inner)
4478 i = i->inner, depth++;
4479 /* If there are peers, process them. */
4480 else if (i->next_peer)
4481 i = i->next_peer;
4482 /* Otherwise, step back up the tree to the next peer. */
4483 else
4484 {
4485 do
4486 {
4487 i = i->outer;
4488 depth--;
4489 if (i == NULL)
4490 return;
4491 }
4492 while (i->next_peer == NULL);
4493 i = i->next_peer;
4494 }
4495 }
4496 }
4497
4498 /* Dump the EH tree for FN on stderr. */
4499
4500 void
4501 debug_eh_tree (struct function *fn)
4502 {
4503 dump_eh_tree (stderr, fn);
4504 }
4505
4506
4507 /* Verify EH region invariants. */
4508
4509 static bool
4510 verify_eh_region (struct eh_region_d *region)
4511 {
4512 bool found = false;
4513 if (!region)
4514 return false;
4515 switch (region->type)
4516 {
4517 case ERT_TRY:
4518 {
4519 struct eh_region_d *c, *prev = NULL;
4520 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
4521 {
4522 error ("Try region %i has wrong rh_catch pointer to %i",
4523 region->region_number,
4524 region->u.eh_try.eh_catch->region_number);
4525 found = true;
4526 }
4527 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4528 {
4529 if (c->outer != region->outer)
4530 {
4531 error
4532 ("Catch region %i has different outer region than try region %i",
4533 c->region_number, region->region_number);
4534 found = true;
4535 }
4536 if (c->u.eh_catch.prev_catch != prev)
4537 {
4538 error ("Catch region %i has corrupted catchlist",
4539 c->region_number);
4540 found = true;
4541 }
4542 prev = c;
4543 }
4544 if (prev != region->u.eh_try.last_catch)
4545 {
4546 error
4547 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4548 region->region_number,
4549 region->u.eh_try.last_catch->region_number,
4550 prev->region_number);
4551 found = true;
4552 }
4553 }
4554 break;
4555 case ERT_CATCH:
4556 if (!region->u.eh_catch.prev_catch
4557 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4558 {
4559 error ("Catch region %i should be followed by try", region->region_number);
4560 found = true;
4561 }
4562 break;
4563 case ERT_CLEANUP:
4564 case ERT_ALLOWED_EXCEPTIONS:
4565 case ERT_MUST_NOT_THROW:
4566 case ERT_THROW:
4567 break;
4568 case ERT_UNKNOWN:
4569 gcc_unreachable ();
4570 }
4571 for (region = region->inner; region; region = region->next_peer)
4572 found |= verify_eh_region (region);
4573 return found;
4574 }
4575
4576 /* Verify invariants on EH datastructures. */
4577
4578 void
4579 verify_eh_tree (struct function *fun)
4580 {
4581 struct eh_region_d *i, *outer = NULL;
4582 bool err = false;
4583 int nvisited = 0;
4584 int count = 0;
4585 int j;
4586 int depth = 0;
4587
4588 if (!fun->eh->region_tree)
4589 return;
4590 for (j = fun->eh->last_region_number; j > 0; --j)
4591 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4592 {
4593 if (i->region_number == j)
4594 count++;
4595 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4596 {
4597 error ("region_array is corrupted for region %i",
4598 i->region_number);
4599 err = true;
4600 }
4601 }
4602 i = fun->eh->region_tree;
4603
4604 while (1)
4605 {
4606 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4607 {
4608 error ("region_array is corrupted for region %i", i->region_number);
4609 err = true;
4610 }
4611 if (i->outer != outer)
4612 {
4613 error ("outer block of region %i is wrong", i->region_number);
4614 err = true;
4615 }
4616 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4617 {
4618 error
4619 ("region %i may contain throw and is contained in region that may not",
4620 i->region_number);
4621 err = true;
4622 }
4623 if (depth < 0)
4624 {
4625 error ("negative nesting depth of region %i", i->region_number);
4626 err = true;
4627 }
4628 nvisited++;
4629 /* If there are sub-regions, process them. */
4630 if (i->inner)
4631 outer = i, i = i->inner, depth++;
4632 /* If there are peers, process them. */
4633 else if (i->next_peer)
4634 i = i->next_peer;
4635 /* Otherwise, step back up the tree to the next peer. */
4636 else
4637 {
4638 do
4639 {
4640 i = i->outer;
4641 depth--;
4642 if (i == NULL)
4643 {
4644 if (depth != -1)
4645 {
4646 error ("tree list ends on depth %i", depth + 1);
4647 err = true;
4648 }
4649 if (count != nvisited)
4650 {
4651 error ("array does not match the region tree");
4652 err = true;
4653 }
4654 if (!err)
4655 for (i = fun->eh->region_tree; i; i = i->next_peer)
4656 err |= verify_eh_region (i);
4657
4658 if (err)
4659 {
4660 dump_eh_tree (stderr, fun);
4661 internal_error ("verify_eh_tree failed");
4662 }
4663 return;
4664 }
4665 outer = i->outer;
4666 }
4667 while (i->next_peer == NULL);
4668 i = i->next_peer;
4669 }
4670 }
4671 }
4672
4673 /* Initialize unwind_resume_libfunc. */
4674
4675 void
4676 default_init_unwind_resume_libfunc (void)
4677 {
4678 /* The default C++ routines aren't actually C++ specific, so use those. */
4679 unwind_resume_libfunc =
4680 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4681 : "_Unwind_Resume");
4682 }
4683
4684 \f
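/* Gate for the RTL eh pass: nothing to do when exception handling
is not enabled. */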
4685 static bool
4686 gate_handle_eh (void)
4687 {
4688 return doing_eh (0);
4689 }
4690
4691 /* Complete generation of exception handling code. */
4692 static unsigned int
4693 rest_of_handle_eh (void)
4694 {
4695 finish_eh_generation ();
4696 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4697 return 0;
4698 }
4699
4700 struct rtl_opt_pass pass_rtl_eh =
4701 {
4702 {
4703 RTL_PASS,
4704 "eh", /* name */
4705 gate_handle_eh, /* gate */
4706 rest_of_handle_eh, /* execute */
4707 NULL, /* sub */
4708 NULL, /* next */
4709 0, /* static_pass_number */
4710 TV_JUMP, /* tv_id */
4711 0, /* properties_required */
4712 0, /* properties_provided */
4713 0, /* properties_destroyed */
4714 0, /* todo_flags_start */
4715 TODO_dump_func /* todo_flags_finish */
4716 }
4717 };
4718
4719 #include "gt-except.h"