gcc/except.c
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 [ Add updated documentation on how to use this. ] */
48
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80
81 /* Provide defaults for stuff that may not be defined when using
82 sjlj exceptions. */
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
85 #endif
86
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions) (void);
90
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers) (tree a, tree b);
93
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type) (tree);
96
97 /* A hash table of label to region number. */
98
99 struct ehl_map_entry GTY(())
100 {
101 rtx label;
102 struct eh_region *region;
103 };
104
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
108
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
116 \f
117
118 struct call_site_record GTY(())
119 {
120 rtx landing_pad;
121 int action;
122 };
123 \f
124 static int t2r_eq (const void *, const void *);
125 static hashval_t t2r_hash (const void *);
126
127 static int ttypes_filter_eq (const void *, const void *);
128 static hashval_t ttypes_filter_hash (const void *);
129 static int ehspec_filter_eq (const void *, const void *);
130 static hashval_t ehspec_filter_hash (const void *);
131 static int add_ttypes_entry (htab_t, tree);
132 static int add_ehspec_entry (htab_t, htab_t, tree);
133 static void assign_filter_values (void);
134 static void build_post_landing_pads (void);
135 static void connect_post_landing_pads (void);
136 static void dw2_build_landing_pads (void);
137
138 struct sjlj_lp_info;
139 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
140 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
141 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
142 static void sjlj_emit_function_enter (rtx);
143 static void sjlj_emit_function_exit (void);
144 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
145 static void sjlj_build_landing_pads (void);
146
147 static void remove_eh_handler (struct eh_region *);
148 static void remove_eh_handler_and_replace (struct eh_region *,
149 struct eh_region *);
150
151 /* The return value of reachable_next_level. */
152 enum reachable_code
153 {
154 /* The given exception is not processed by the given region. */
155 RNL_NOT_CAUGHT,
156 /* The given exception may need processing by the given region. */
157 RNL_MAYBE_CAUGHT,
158 /* The given exception is completely processed by the given region. */
159 RNL_CAUGHT,
160 /* The given exception is completely processed by the runtime. */
161 RNL_BLOCKED
162 };
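/* Roughly, for a thrown "int" propagating outward: a catch (int)
region yields RNL_CAUGHT, a catch (long) region RNL_NOT_CAUGHT, a
cleanup region RNL_MAYBE_CAUGHT, and an empty "throw ()"
specification RNL_BLOCKED (the runtime handles it). */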
163
164 struct reachable_info;
165 static enum reachable_code reachable_next_level (struct eh_region *, tree,
166 struct reachable_info *, bool);
167
168 static int action_record_eq (const void *, const void *);
169 static hashval_t action_record_hash (const void *);
170 static int add_action_record (htab_t, int, int);
171 static int collect_one_action_chain (htab_t, struct eh_region *);
172 static int add_call_site (rtx, int);
173
174 static void push_uleb128 (varray_type *, unsigned int);
175 static void push_sleb128 (varray_type *, int);
176 #ifndef HAVE_AS_LEB128
177 static int dw2_size_of_call_site_table (void);
178 static int sjlj_size_of_call_site_table (void);
179 #endif
180 static void dw2_output_call_site_table (void);
181 static void sjlj_output_call_site_table (void);
182
183 \f
184 /* Routine to see if exception handling is turned on.
185 DO_WARN is nonzero if we want to inform the user that exception
186 handling is turned off.
187
188 This is used to ensure that -fexceptions has been specified if the
189 compiler tries to use any exception-specific functions. */
190
191 int
192 doing_eh (int do_warn)
193 {
194 if (! flag_exceptions)
195 {
196 static int warned = 0;
197 if (! warned && do_warn)
198 {
199 error ("exception handling disabled, use -fexceptions to enable");
200 warned = 1;
201 }
202 return 0;
203 }
204 return 1;
205 }
206
207 \f
208 void
209 init_eh (void)
210 {
211 if (! flag_exceptions)
212 return;
213
214 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
215
216 /* Create the SjLj_Function_Context structure. This should match
217 the definition in unwind-sjlj.c. */
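/* For orientation, the layout being built below corresponds roughly
to the following C sketch. This is derived only from the fields
created here; the authoritative definition lives in unwind-sjlj.c,
and the size of __jbuf varies per configuration (JBUF_LEN is a
placeholder, not a real macro):

struct SjLj_Function_Context
{
struct SjLj_Function_Context *__prev;
int __call_site;
_Unwind_Word __data[4];
void *__personality;
void *__lsda;
void *__jbuf[JBUF_LEN];
}; */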
218 if (USING_SJLJ_EXCEPTIONS)
219 {
220 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
221
222 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
223
224 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
225 build_pointer_type (sjlj_fc_type_node));
226 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
227
228 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
229 integer_type_node);
230 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
231
232 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
233 tmp = build_array_type (lang_hooks.types.type_for_mode
234 (targetm.unwind_word_mode (), 1),
235 tmp);
236 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
237 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
238
239 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
240 ptr_type_node);
241 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
242
243 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
246
247 #ifdef DONT_USE_BUILTIN_SETJMP
248 #ifdef JMP_BUF_SIZE
249 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
250 #else
/* This should be large enough for most systems; if it is not,
JMP_BUF_SIZE should be defined with the proper value. It will
also tend to be larger than necessary for most systems; a more
optimal port will define JMP_BUF_SIZE. */
255 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
256 #endif
257 #else
258 /* builtin_setjmp takes a pointer to 5 words. */
259 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
260 #endif
261 tmp = build_index_type (tmp);
262 tmp = build_array_type (ptr_type_node, tmp);
263 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
264 #ifdef DONT_USE_BUILTIN_SETJMP
/* We don't know what alignment requirements the runtime's
jmp_buf has. Overestimate. */
267 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
268 DECL_USER_ALIGN (f_jbuf) = 1;
269 #endif
270 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
271
272 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
273 TREE_CHAIN (f_prev) = f_cs;
274 TREE_CHAIN (f_cs) = f_data;
275 TREE_CHAIN (f_data) = f_per;
276 TREE_CHAIN (f_per) = f_lsda;
277 TREE_CHAIN (f_lsda) = f_jbuf;
278
279 layout_type (sjlj_fc_type_node);
280
281 /* Cache the interesting field offsets so that we have
282 easy access from rtl. */
283 sjlj_fc_call_site_ofs
284 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
285 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
286 sjlj_fc_data_ofs
287 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
288 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
289 sjlj_fc_personality_ofs
290 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
291 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
292 sjlj_fc_lsda_ofs
293 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
294 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
295 sjlj_fc_jbuf_ofs
296 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
297 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
298 }
299 }
300
301 void
302 init_eh_for_function (void)
303 {
304 cfun->eh = GGC_CNEW (struct eh_status);
305 }
306 \f
307 /* Routines to generate the exception tree somewhat directly.
308 These are used from tree-eh.c when processing exception related
309 nodes during tree optimization. */
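/* As a usage sketch (hypothetical caller code, not copied from
tree-eh.c): lowering "try { ... } catch (T) { ... }" builds a TRY
region with one CATCH subregion, roughly

struct eh_region *try_region = gen_eh_region_try (outer);
struct eh_region *catch_region
= gen_eh_region_catch (try_region, t_type);

where OUTER is the enclosing region (or NULL) and T_TYPE is the tree
for T; gen_eh_region_catch registers T_TYPE with the runtime type
map and links the new region onto the try's catch list. */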
310
311 static struct eh_region *
312 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
313 {
314 struct eh_region *new_eh;
315
316 #ifdef ENABLE_CHECKING
317 gcc_assert (doing_eh (0));
318 #endif
319
320 /* Insert a new blank region as a leaf in the tree. */
321 new_eh = GGC_CNEW (struct eh_region);
322 new_eh->type = type;
323 new_eh->outer = outer;
324 if (outer)
325 {
326 new_eh->next_peer = outer->inner;
327 outer->inner = new_eh;
328 }
329 else
330 {
331 new_eh->next_peer = cfun->eh->region_tree;
332 cfun->eh->region_tree = new_eh;
333 }
334
335 new_eh->region_number = ++cfun->eh->last_region_number;
336
337 return new_eh;
338 }
339
340 struct eh_region *
341 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
342 {
343 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
344 cleanup->u.cleanup.prev_try = prev_try;
345 return cleanup;
346 }
347
348 struct eh_region *
349 gen_eh_region_try (struct eh_region *outer)
350 {
351 return gen_eh_region (ERT_TRY, outer);
352 }
353
354 struct eh_region *
355 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
356 {
357 struct eh_region *c, *l;
358 tree type_list, type_node;
359
/* Make sure we always end up with a type list to normalize further
processing, then register each type against the runtime types map. */
362 type_list = type_or_list;
363 if (type_or_list)
364 {
365 if (TREE_CODE (type_or_list) != TREE_LIST)
366 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
367
368 type_node = type_list;
369 for (; type_node; type_node = TREE_CHAIN (type_node))
370 add_type_for_runtime (TREE_VALUE (type_node));
371 }
372
373 c = gen_eh_region (ERT_CATCH, t->outer);
374 c->u.eh_catch.type_list = type_list;
375 l = t->u.eh_try.last_catch;
376 c->u.eh_catch.prev_catch = l;
377 if (l)
378 l->u.eh_catch.next_catch = c;
379 else
380 t->u.eh_try.eh_catch = c;
381 t->u.eh_try.last_catch = c;
382
383 return c;
384 }
385
386 struct eh_region *
387 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
388 {
389 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
390 region->u.allowed.type_list = allowed;
391
392 for (; allowed ; allowed = TREE_CHAIN (allowed))
393 add_type_for_runtime (TREE_VALUE (allowed));
394
395 return region;
396 }
397
398 struct eh_region *
399 gen_eh_region_must_not_throw (struct eh_region *outer)
400 {
401 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
402 }
403
404 int
405 get_eh_region_number (struct eh_region *region)
406 {
407 return region->region_number;
408 }
409
410 bool
411 get_eh_region_may_contain_throw (struct eh_region *region)
412 {
413 return region->may_contain_throw;
414 }
415
416 tree
417 get_eh_region_tree_label (struct eh_region *region)
418 {
419 return region->tree_label;
420 }
421
422 tree
423 get_eh_region_no_tree_label (int region)
424 {
425 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
426 }
427
428 void
429 set_eh_region_tree_label (struct eh_region *region, tree lab)
430 {
431 region->tree_label = lab;
432 }
433 \f
434 void
435 expand_resx_expr (tree exp)
436 {
437 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
438 struct eh_region *reg = VEC_index (eh_region,
439 cfun->eh->region_array, region_nr);
440
441 gcc_assert (!reg->resume);
442 do_pending_stack_adjust ();
443 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
444 emit_barrier ();
445 }
446
447 /* Note that the current EH region (if any) may contain a throw, or a
448 call to a function which itself may contain a throw. */
449
450 void
451 note_eh_region_may_contain_throw (struct eh_region *region)
452 {
453 while (region && !region->may_contain_throw)
454 {
455 region->may_contain_throw = 1;
456 region = region->outer;
457 }
458 }
459
460
461 /* Return an rtl expression for a pointer to the exception object
462 within a handler. */
463
464 rtx
465 get_exception_pointer (void)
466 {
467 if (! crtl->eh.exc_ptr)
468 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
469 return crtl->eh.exc_ptr;
470 }
471
472 /* Return an rtl expression for the exception dispatch filter
473 within a handler. */
474
475 rtx
476 get_exception_filter (void)
477 {
478 if (! crtl->eh.filter)
479 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
480 return crtl->eh.filter;
481 }
482 \f
483 /* This section is for the exception handling specific optimization pass. */
484
485 /* Random access the exception region tree. */
486
487 void
488 collect_eh_region_array (void)
489 {
490 struct eh_region *i;
491
492 i = cfun->eh->region_tree;
493 if (! i)
494 return;
495
496 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
497 cfun->eh->last_region_number + 1);
498 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
499
500 while (1)
501 {
502 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
503
504 /* If there are sub-regions, process them. */
505 if (i->inner)
506 i = i->inner;
507 /* If there are peers, process them. */
508 else if (i->next_peer)
509 i = i->next_peer;
510 /* Otherwise, step back up the tree to the next peer. */
511 else
512 {
513 do {
514 i = i->outer;
515 if (i == NULL)
516 return;
517 } while (i->next_peer == NULL);
518 i = i->next_peer;
519 }
520 }
521 }
522
/* R is a MUST_NOT_THROW region that is not reachable via local
RESX instructions. It must still be kept in the tree in case the
runtime can unwind through it, or we would eliminate the terminate
call the runtime would otherwise perform. Return TRUE if R contains
throwing statements or some of the exceptions in inner regions can
be unwound up to R.

CONTAINS_STMT is a bitmap of all regions that contain some throwing
statements.

The function looks O(n^3) at first sight. In fact it is called at
most once for every MUST_NOT_THROW in the EH tree, from
remove_unreachable_regions. Because the outer loop walking subregions
does not dive into MUST_NOT_THROW regions, it examines every region
at most once. The inner loop unwinds from the throwing statement the
same way as we do during CFG construction, so the whole thing is
O(n^2) in the size of the EH tree, but O(n) in the size of the CFG.
In practice EH trees are wide, not deep, so this is not a problem. */
540
541 static bool
542 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
543 {
544 struct eh_region *i = r->inner;
545 unsigned n;
546 bitmap_iterator bi;
547
548 if (TEST_BIT (contains_stmt, r->region_number))
549 return true;
550 if (r->aka)
551 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
552 if (TEST_BIT (contains_stmt, n))
553 return true;
554 if (!i)
555 return false;
556 while (1)
557 {
558 /* It is pointless to look into MUST_NOT_THROW
559 or dive into subregions. They never unwind up. */
560 if (i->type != ERT_MUST_NOT_THROW)
561 {
562 bool found = TEST_BIT (contains_stmt, i->region_number);
563 if (!found)
564 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
565 if (TEST_BIT (contains_stmt, n))
566 {
567 found = true;
568 break;
569 }
/* We have a nested region that contains a throwing statement.
See if resuming might lead up to the resx, or whether we get
locally caught sooner. If we get locally caught sooner, we
either know region R is not reachable, or it would have a
direct edge from the EH resx and thus we would have considered
the region reachable in the first place. */
576 if (found)
577 {
578 struct eh_region *i1 = i;
579 tree type_thrown = NULL_TREE;
580
581 if (i1->type == ERT_THROW)
582 {
583 type_thrown = i1->u.eh_throw.type;
584 i1 = i1->outer;
585 }
586 for (; i1 != r; i1 = i1->outer)
587 if (reachable_next_level (i1, type_thrown, NULL,
588 false) >= RNL_CAUGHT)
589 break;
590 if (i1 == r)
591 return true;
592 }
593 }
594 /* If there are sub-regions, process them. */
595 if (i->type != ERT_MUST_NOT_THROW && i->inner)
596 i = i->inner;
597 /* If there are peers, process them. */
598 else if (i->next_peer)
599 i = i->next_peer;
600 /* Otherwise, step back up the tree to the next peer. */
601 else
602 {
603 do
604 {
605 i = i->outer;
606 if (i == r)
607 return false;
608 }
609 while (i->next_peer == NULL);
610 i = i->next_peer;
611 }
612 }
613 }
614
/* Bring region R to the root of the tree. */
616
617 static void
618 bring_to_root (struct eh_region *r)
619 {
620 struct eh_region **pp;
621 struct eh_region *outer = r->outer;
622 if (!r->outer)
623 return;
624 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
625 continue;
626 *pp = r->next_peer;
627 r->outer = NULL;
628 r->next_peer = cfun->eh->region_tree;
629 cfun->eh->region_tree = r;
630 }
631
/* Remove all regions whose labels are not reachable.
REACHABLE is a bitmap of all regions that are used by the function.
CONTAINS_STMT is a bitmap of all regions that contain statements
(or NULL). */
635
636 void
637 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
638 {
639 int i;
640 struct eh_region *r;
641 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
642 struct eh_region *local_must_not_throw = NULL;
643 struct eh_region *first_must_not_throw = NULL;
644
645 for (i = cfun->eh->last_region_number; i > 0; --i)
646 {
647 r = VEC_index (eh_region, cfun->eh->region_array, i);
648 if (!r || r->region_number != i)
649 continue;
650 if (!TEST_BIT (reachable, i) && !r->resume)
651 {
652 bool kill_it = true;
653
654 r->tree_label = NULL;
655 switch (r->type)
656 {
657 case ERT_THROW:
658 /* Don't remove ERT_THROW regions if their outer region
659 is reachable. */
660 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
661 kill_it = false;
662 break;
663 case ERT_MUST_NOT_THROW:
/* MUST_NOT_THROW regions are implementable solely in the
runtime, but we need them when inlining a function.

Keep them if the outer region is not MUST_NOT_THROW as well
and if they contain some statement that might unwind through
them. */
670 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
671 && (!contains_stmt
672 || can_be_reached_by_runtime (contains_stmt, r)))
673 kill_it = false;
674 break;
675 case ERT_TRY:
676 {
/* TRY regions are reachable if any of their CATCH regions
are reachable. */
679 struct eh_region *c;
680 for (c = r->u.eh_try.eh_catch; c;
681 c = c->u.eh_catch.next_catch)
682 if (TEST_BIT (reachable, c->region_number))
683 {
684 kill_it = false;
685 break;
686 }
687 break;
688 }
689
690 default:
691 break;
692 }
693
694 if (kill_it)
695 {
696 if (dump_file)
697 fprintf (dump_file, "Removing unreachable eh region %i\n",
698 r->region_number);
699 remove_eh_handler (r);
700 }
701 else if (r->type == ERT_MUST_NOT_THROW)
702 {
703 if (!first_must_not_throw)
704 first_must_not_throw = r;
705 VEC_safe_push (eh_region, heap, must_not_throws, r);
706 }
707 }
708 else
709 if (r->type == ERT_MUST_NOT_THROW)
710 {
711 if (!local_must_not_throw)
712 local_must_not_throw = r;
713 if (r->outer)
714 VEC_safe_push (eh_region, heap, must_not_throws, r);
715 }
716 }
717
/* MUST_NOT_THROW regions without a local handler are all the same;
they trigger a terminate call in the runtime.
MUST_NOT_THROW regions handled locally can differ in the debug info
associated with the std::terminate () call, or, when one comes from
Java and the other from C++, in whether they call terminate or abort.

We merge all MUST_NOT_THROW regions handled by the runtime into one.
We also bring all local MUST_NOT_THROW regions to the roots of the
EH tree (since unwinding never continues to the outer region anyway).
If a MUST_NOT_THROW region with a local handler is present in the
tree, we use that region to merge into, since it will remain in the
tree anyway; otherwise we use the first MUST_NOT_THROW region.

Merging of locally handled regions needs changes to the CFG.
Crossjumping should take care of this, by looking at the actual code
and ensuring that the cleanup actions are really the same. */
734
735 if (local_must_not_throw)
736 first_must_not_throw = local_must_not_throw;
737
738 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
739 {
740 if (!r->label && !r->tree_label && r != first_must_not_throw)
741 {
742 if (dump_file)
743 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
744 r->region_number,
745 first_must_not_throw->region_number);
746 remove_eh_handler_and_replace (r, first_must_not_throw);
747 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
748 }
749 else
750 bring_to_root (r);
751 }
752 #ifdef ENABLE_CHECKING
753 verify_eh_tree (cfun);
754 #endif
755 VEC_free (eh_region, heap, must_not_throws);
756 }
757
/* Return an array mapping LABEL_DECL_UID to the region whose
tree_label is that label. */
760
761 VEC(int,heap) *
762 label_to_region_map (void)
763 {
764 VEC(int,heap) * label_to_region = NULL;
765 int i;
766
767 VEC_safe_grow_cleared (int, heap, label_to_region,
768 cfun->cfg->last_label_uid + 1);
769 for (i = cfun->eh->last_region_number; i > 0; --i)
770 {
771 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
772 if (r && r->region_number == i
773 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
774 {
775 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
776 i);
777 }
778 }
779 return label_to_region;
780 }
781
782 /* Return number of EH regions. */
783 int
784 num_eh_regions (void)
785 {
786 return cfun->eh->last_region_number + 1;
787 }
788
789 /* Set up EH labels for RTL. */
790
791 void
792 convert_from_eh_region_ranges (void)
793 {
794 int i, n = cfun->eh->last_region_number;
795
/* Most of the work is already done at the tree level. All we need
to do is collect the rtl labels that correspond to the tree labels
we allocated earlier. */
800 for (i = 1; i <= n; ++i)
801 {
802 struct eh_region *region;
803
804 region = VEC_index (eh_region, cfun->eh->region_array, i);
805 if (region && region->tree_label)
806 region->label = DECL_RTL_IF_SET (region->tree_label);
807 }
808 }
809
810 void
811 find_exception_handler_labels (void)
812 {
813 int i;
814
815 if (cfun->eh->region_tree == NULL)
816 return;
817
818 for (i = cfun->eh->last_region_number; i > 0; --i)
819 {
820 struct eh_region *region;
821 rtx lab;
822
823 region = VEC_index (eh_region, cfun->eh->region_array, i);
824 if (! region || region->region_number != i)
825 continue;
826 if (crtl->eh.built_landing_pads)
827 lab = region->landing_pad;
828 else
829 lab = region->label;
830 }
831 }
832
833 /* Returns true if the current function has exception handling regions. */
834
835 bool
836 current_function_has_exception_handlers (void)
837 {
838 int i;
839
840 for (i = cfun->eh->last_region_number; i > 0; --i)
841 {
842 struct eh_region *region;
843
844 region = VEC_index (eh_region, cfun->eh->region_array, i);
845 if (region
846 && region->region_number == i
847 && region->type != ERT_THROW)
848 return true;
849 }
850
851 return false;
852 }
853 \f
854 /* A subroutine of duplicate_eh_regions. Search the region tree under O
855 for the minimum and maximum region numbers. Update *MIN and *MAX. */
856
857 static void
858 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
859 {
860 int i;
861
862 if (o->aka)
863 {
864 i = bitmap_first_set_bit (o->aka);
865 if (i < *min)
866 *min = i;
867 i = bitmap_last_set_bit (o->aka);
868 if (i > *max)
869 *max = i;
870 }
871 if (o->region_number < *min)
872 *min = o->region_number;
873 if (o->region_number > *max)
874 *max = o->region_number;
875
876 if (o->inner)
877 {
878 o = o->inner;
879 duplicate_eh_regions_0 (o, min, max);
880 while (o->next_peer)
881 {
882 o = o->next_peer;
883 duplicate_eh_regions_0 (o, min, max);
884 }
885 }
886 }
887
888 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
889 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
890 about the other internal pointers just yet, just the tree-like pointers. */
891
892 static eh_region
893 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
894 {
895 eh_region ret, n;
896
897 ret = n = GGC_NEW (struct eh_region);
898
899 *n = *old;
900 n->outer = outer;
901 n->next_peer = NULL;
902 if (old->aka)
903 {
904 unsigned i;
905 bitmap_iterator bi;
906 n->aka = BITMAP_GGC_ALLOC ();
907
908 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
909 {
910 bitmap_set_bit (n->aka, i + eh_offset);
911 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
912 }
913 }
914
915 n->region_number += eh_offset;
916 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
917
918 if (old->inner)
919 {
920 old = old->inner;
921 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
922 while (old->next_peer)
923 {
924 old = old->next_peer;
925 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
926 }
927 }
928
929 return ret;
930 }
931
/* Return the prev_try pointer that the catch subregions of R should
point to. */
934
935 static struct eh_region *
936 find_prev_try (struct eh_region * r)
937 {
938 for (; r && r->type != ERT_TRY; r = r->outer)
939 if (r->type == ERT_MUST_NOT_THROW
940 || (r->type == ERT_ALLOWED_EXCEPTIONS
941 && !r->u.allowed.type_list))
942 {
943 r = NULL;
944 break;
945 }
946 return r;
947 }
948
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the
current function and root the tree below OUTER_REGION. Remap labels
using the MAP callback. The special case of COPY_REGION == 0 means
all regions. */
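/* A usage sketch (hypothetical caller, invented names): an inliner
copying all of CALLEE_FUN's regions below region OUTER_RN of the
current function might do

int eh_offset = duplicate_eh_regions (callee_fun, remap_label_cb,
data, 0, outer_rn);

after which region N of CALLEE_FUN corresponds to region
N + eh_offset in the current function. */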
952
953 int
954 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
955 void *data, int copy_region, int outer_region)
956 {
957 eh_region cur, prev_try, old_prev_try, outer, *splice;
958 int i, min_region, max_region, eh_offset, cfun_last_region_number;
959 int num_regions;
960
961 if (!ifun->eh)
962 return 0;
963 #ifdef ENABLE_CHECKING
964 verify_eh_tree (ifun);
965 #endif
966
967 /* Find the range of region numbers to be copied. The interface we
968 provide here mandates a single offset to find new number from old,
969 which means we must look at the numbers present, instead of the
970 count or something else. */
971 if (copy_region > 0)
972 {
973 min_region = INT_MAX;
974 max_region = 0;
975
976 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
977 old_prev_try = find_prev_try (cur);
978 duplicate_eh_regions_0 (cur, &min_region, &max_region);
979 }
980 else
981 {
982 min_region = 1;
983 max_region = ifun->eh->last_region_number;
984 old_prev_try = NULL;
985 }
986 num_regions = max_region - min_region + 1;
987 cfun_last_region_number = cfun->eh->last_region_number;
988 eh_offset = cfun_last_region_number + 1 - min_region;
989
990 /* If we've not yet created a region array, do so now. */
991 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
992 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
993 cfun->eh->last_region_number + 1);
994
995 /* Locate the spot at which to insert the new tree. */
996 if (outer_region > 0)
997 {
998 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
999 if (outer)
1000 splice = &outer->inner;
1001 else
1002 splice = &cfun->eh->region_tree;
1003 }
1004 else
1005 {
1006 outer = NULL;
1007 splice = &cfun->eh->region_tree;
1008 }
1009 while (*splice)
1010 splice = &(*splice)->next_peer;
1011
1012 if (!ifun->eh->region_tree)
1013 {
1014 if (outer)
1015 for (i = cfun_last_region_number + 1;
1016 i <= cfun->eh->last_region_number; i++)
1017 {
1018 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1019 if (outer->aka == NULL)
1020 outer->aka = BITMAP_GGC_ALLOC ();
1021 bitmap_set_bit (outer->aka, i);
1022 }
1023 return eh_offset;
1024 }
1025
1026 /* Copy all the regions in the subtree. */
1027 if (copy_region > 0)
1028 {
1029 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1030 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1031 }
1032 else
1033 {
1034 eh_region n;
1035
1036 cur = ifun->eh->region_tree;
1037 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1038 while (cur->next_peer)
1039 {
1040 cur = cur->next_peer;
1041 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1042 }
1043 }
1044
1045 /* Remap all the labels in the new regions. */
1046 for (i = cfun_last_region_number + 1;
1047 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1048 if (cur && cur->tree_label)
1049 cur->tree_label = map (cur->tree_label, data);
1050
1051 /* Search for the containing ERT_TRY region to fix up
1052 the prev_try short-cuts for ERT_CLEANUP regions. */
1053 prev_try = NULL;
1054 if (outer_region > 0)
1055 prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array, outer_region));
1056
1057 /* Remap all of the internal catch and cleanup linkages. Since we
1058 duplicate entire subtrees, all of the referenced regions will have
1059 been copied too. And since we renumbered them as a block, a simple
1060 bit of arithmetic finds us the index for the replacement region. */
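/* Worked example: if the copied subtree was offset by
eh_offset == 10, a catch pointer that referred to old region 3 must
now point at region 13, i.e. region_array[3 + 10]. */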
1061 for (i = cfun_last_region_number + 1;
1062 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1063 {
/* All removed EH regions that were toplevel in the input function
are now part of the outer EH region of the output function. */
1066 if (cur == NULL)
1067 {
1068 gcc_assert (VEC_index
1069 (eh_region, ifun->eh->region_array,
1070 i - eh_offset) == NULL);
1071 if (outer)
1072 {
1073 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1074 if (outer->aka == NULL)
1075 outer->aka = BITMAP_GGC_ALLOC ();
1076 bitmap_set_bit (outer->aka, i);
1077 }
1078 continue;
1079 }
1080 if (i != cur->region_number)
1081 continue;
1082
1083 #define REMAP(REG) \
1084 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1085 (REG)->region_number + eh_offset)
1086
1087 switch (cur->type)
1088 {
1089 case ERT_TRY:
1090 if (cur->u.eh_try.eh_catch)
1091 REMAP (cur->u.eh_try.eh_catch);
1092 if (cur->u.eh_try.last_catch)
1093 REMAP (cur->u.eh_try.last_catch);
1094 break;
1095
1096 case ERT_CATCH:
1097 if (cur->u.eh_catch.next_catch)
1098 REMAP (cur->u.eh_catch.next_catch);
1099 if (cur->u.eh_catch.prev_catch)
1100 REMAP (cur->u.eh_catch.prev_catch);
1101 break;
1102
1103 case ERT_CLEANUP:
1104 if (cur->u.cleanup.prev_try != old_prev_try)
1105 REMAP (cur->u.cleanup.prev_try);
1106 else
1107 cur->u.cleanup.prev_try = prev_try;
1108 break;
1109
1110 default:
1111 break;
1112 }
1113
1114 #undef REMAP
1115 }
1116 #ifdef ENABLE_CHECKING
1117 verify_eh_tree (cfun);
1118 #endif
1119
1120 return eh_offset;
1121 }
1122
/* Return the region number of the region that is outer to both
REGION_A and REGION_B in IFUN. */
1125
1126 int
1127 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1128 {
1129 struct eh_region *rp_a, *rp_b;
1130 sbitmap b_outer;
1131
1132 gcc_assert (ifun->eh->last_region_number > 0);
1133 gcc_assert (ifun->eh->region_tree);
1134
1135 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1136 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1137 gcc_assert (rp_a != NULL);
1138 gcc_assert (rp_b != NULL);
1139
1140 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1141 sbitmap_zero (b_outer);
1142
1143 do
1144 {
1145 SET_BIT (b_outer, rp_b->region_number);
1146 rp_b = rp_b->outer;
1147 }
1148 while (rp_b);
1149
1150 do
1151 {
1152 if (TEST_BIT (b_outer, rp_a->region_number))
1153 {
1154 sbitmap_free (b_outer);
1155 return rp_a->region_number;
1156 }
1157 rp_a = rp_a->outer;
1158 }
1159 while (rp_a);
1160
1161 sbitmap_free (b_outer);
1162 return -1;
1163 }
1164 \f
1165 static int
1166 t2r_eq (const void *pentry, const void *pdata)
1167 {
1168 const_tree const entry = (const_tree) pentry;
1169 const_tree const data = (const_tree) pdata;
1170
1171 return TREE_PURPOSE (entry) == data;
1172 }
1173
1174 static hashval_t
1175 t2r_hash (const void *pentry)
1176 {
1177 const_tree const entry = (const_tree) pentry;
1178 return TREE_HASH (TREE_PURPOSE (entry));
1179 }
1180
1181 void
1182 add_type_for_runtime (tree type)
1183 {
1184 tree *slot;
1185
1186 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1187 TREE_HASH (type), INSERT);
1188 if (*slot == NULL)
1189 {
1190 tree runtime = (*lang_eh_runtime_type) (type);
1191 *slot = tree_cons (type, runtime, NULL_TREE);
1192 }
1193 }
1194
1195 tree
1196 lookup_type_for_runtime (tree type)
1197 {
1198 tree *slot;
1199
1200 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1201 TREE_HASH (type), NO_INSERT);
1202
1203 /* We should have always inserted the data earlier. */
1204 return TREE_VALUE (*slot);
1205 }
1206
1207 \f
1208 /* Represent an entry in @TTypes for either catch actions
1209 or exception filter actions. */
1210 struct ttypes_filter GTY(())
1211 {
1212 tree t;
1213 int filter;
1214 };
1215
1216 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1217 (a tree) for a @TTypes type node we are thinking about adding. */
1218
1219 static int
1220 ttypes_filter_eq (const void *pentry, const void *pdata)
1221 {
1222 const struct ttypes_filter *const entry
1223 = (const struct ttypes_filter *) pentry;
1224 const_tree const data = (const_tree) pdata;
1225
1226 return entry->t == data;
1227 }
1228
1229 static hashval_t
1230 ttypes_filter_hash (const void *pentry)
1231 {
1232 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1233 return TREE_HASH (entry->t);
1234 }
1235
1236 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1237 exception specification list we are thinking about adding. */
1238 /* ??? Currently we use the type lists in the order given. Someone
1239 should put these in some canonical order. */
1240
1241 static int
1242 ehspec_filter_eq (const void *pentry, const void *pdata)
1243 {
1244 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1245 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1246
1247 return type_list_equal (entry->t, data->t);
1248 }
1249
1250 /* Hash function for exception specification lists. */
1251
1252 static hashval_t
1253 ehspec_filter_hash (const void *pentry)
1254 {
1255 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1256 hashval_t h = 0;
1257 tree list;
1258
1259 for (list = entry->t; list ; list = TREE_CHAIN (list))
1260 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1261 return h;
1262 }
1263
1264 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1265 to speed up the search. Return the filter value to be used. */
1266
1267 static int
1268 add_ttypes_entry (htab_t ttypes_hash, tree type)
1269 {
1270 struct ttypes_filter **slot, *n;
1271
1272 slot = (struct ttypes_filter **)
1273 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1274
1275 if ((n = *slot) == NULL)
1276 {
1277 /* Filter value is a 1 based table index. */
1278
1279 n = XNEW (struct ttypes_filter);
1280 n->t = type;
1281 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1282 *slot = n;
1283
1284 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1285 }
1286
1287 return n->filter;
1288 }
1289
1290 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1291 to speed up the search. Return the filter value to be used. */
1292
1293 static int
1294 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1295 {
1296 struct ttypes_filter **slot, *n;
1297 struct ttypes_filter dummy;
1298
1299 dummy.t = list;
1300 slot = (struct ttypes_filter **)
1301 htab_find_slot (ehspec_hash, &dummy, INSERT);
1302
1303 if ((n = *slot) == NULL)
1304 {
1305 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1306
1307 n = XNEW (struct ttypes_filter);
1308 n->t = list;
1309 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1310 *slot = n;
1311
1312 /* Generate a 0 terminated list of filter values. */
1313 for (; list ; list = TREE_CHAIN (list))
1314 {
1315 if (targetm.arm_eabi_unwinder)
1316 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1317 else
1318 {
1319 /* Look up each type in the list and encode its filter
1320 value as a uleb128. */
1321 push_uleb128 (&crtl->eh.ehspec_data,
1322 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1323 }
1324 }
1325 if (targetm.arm_eabi_unwinder)
1326 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1327 else
1328 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1329 }
1330
1331 return n->filter;
1332 }
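/* Worked example (assuming a fresh function and a non-ARM-EABI
target): the first type passed to add_ttypes_entry receives filter
1, the second filter 2, and so on. The first specification list
added here starts at byte 0 of ehspec_data, so it receives filter
-(0 + 1) == -1; a "throw (A, B)" specification is then encoded as
the uleb128 ttypes filters of A and B followed by a terminating
0. */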
1333
1334 /* Generate the action filter values to be used for CATCH and
1335 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1336 we use lots of landing pads, and so every type or list can share
1337 the same filter value, which saves table space. */
1338
1339 static void
1340 assign_filter_values (void)
1341 {
1342 int i;
1343 htab_t ttypes, ehspec;
1344
1345 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1346 if (targetm.arm_eabi_unwinder)
1347 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1348 else
1349 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1350
1351 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1352 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1353
1354 for (i = cfun->eh->last_region_number; i > 0; --i)
1355 {
1356 struct eh_region *r;
1357
1358 r = VEC_index (eh_region, cfun->eh->region_array, i);
1359
1360 /* Mind we don't process a region more than once. */
1361 if (!r || r->region_number != i)
1362 continue;
1363
1364 switch (r->type)
1365 {
1366 case ERT_CATCH:
1367 /* Whatever type_list is (NULL or true list), we build a list
1368 of filters for the region. */
1369 r->u.eh_catch.filter_list = NULL_TREE;
1370
1371 if (r->u.eh_catch.type_list != NULL)
1372 {
1373 /* Get a filter value for each of the types caught and store
1374 them in the region's dedicated list. */
1375 tree tp_node = r->u.eh_catch.type_list;
1376
1377 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1378 {
1379 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1380 tree flt_node = build_int_cst (NULL_TREE, flt);
1381
1382 r->u.eh_catch.filter_list
1383 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1384 }
1385 }
1386 else
1387 {
1388 /* Get a filter value for the NULL list also since it will need
1389 an action record anyway. */
1390 int flt = add_ttypes_entry (ttypes, NULL);
1391 tree flt_node = build_int_cst (NULL_TREE, flt);
1392
1393 r->u.eh_catch.filter_list
1394 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1395 }
1396
1397 break;
1398
1399 case ERT_ALLOWED_EXCEPTIONS:
1400 r->u.allowed.filter
1401 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1402 break;
1403
1404 default:
1405 break;
1406 }
1407 }
1408
1409 htab_delete (ttypes);
1410 htab_delete (ehspec);
1411 }
1412
/* Emit SEQ into the basic block just before INSN (which is assumed
to be the first instruction of some existing BB) and return the
newly produced block. */
1416 static basic_block
1417 emit_to_new_bb_before (rtx seq, rtx insn)
1418 {
1419 rtx last;
1420 basic_block bb;
1421 edge e;
1422 edge_iterator ei;
1423
/* If there happens to be a fallthru edge (possibly created by a
cleanup_cfg call), we don't want it to go into the newly created
landing pad or other EH construct. */
1427 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1428 if (e->flags & EDGE_FALLTHRU)
1429 force_nonfallthru (e);
1430 else
1431 ei_next (&ei);
1432 last = emit_insn_before (seq, insn);
1433 if (BARRIER_P (last))
1434 last = PREV_INSN (last);
1435 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1436 update_bb_for_insn (bb);
1437 bb->flags |= BB_SUPERBLOCK;
1438 return bb;
1439 }
1440
1441 /* Generate the code to actually handle exceptions, which will follow the
1442 landing pads. */
1443
1444 static void
1445 build_post_landing_pads (void)
1446 {
1447 int i;
1448
1449 for (i = cfun->eh->last_region_number; i > 0; --i)
1450 {
1451 struct eh_region *region;
1452 rtx seq;
1453
1454 region = VEC_index (eh_region, cfun->eh->region_array, i);
1455 /* Mind we don't process a region more than once. */
1456 if (!region || region->region_number != i)
1457 continue;
1458
1459 switch (region->type)
1460 {
1461 case ERT_TRY:
1462 /* ??? Collect the set of all non-overlapping catch handlers
1463 all the way up the chain until blocked by a cleanup. */
1464 /* ??? Outer try regions can share landing pads with inner
1465 try regions if the types are completely non-overlapping,
1466 and there are no intervening cleanups. */
1467
1468 region->post_landing_pad = gen_label_rtx ();
1469
1470 start_sequence ();
1471
1472 emit_label (region->post_landing_pad);
1473
1474 /* ??? It is mighty inconvenient to call back into the
1475 switch statement generation code in expand_end_case.
1476 Rapid prototyping sez a sequence of ifs. */
1477 {
1478 struct eh_region *c;
1479 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1480 {
1481 if (c->u.eh_catch.type_list == NULL)
1482 emit_jump (c->label);
1483 else
1484 {
/* We need one cmp/jump per type caught. Each type
list entry has a matching entry in the filter list
(see assign_filter_values). */
1488 tree tp_node = c->u.eh_catch.type_list;
1489 tree flt_node = c->u.eh_catch.filter_list;
1490
1491 for (; tp_node; )
1492 {
1493 emit_cmp_and_jump_insns
1494 (crtl->eh.filter,
1495 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1496 EQ, NULL_RTX,
1497 targetm.eh_return_filter_mode (), 0, c->label);
1498
1499 tp_node = TREE_CHAIN (tp_node);
1500 flt_node = TREE_CHAIN (flt_node);
1501 }
1502 }
1503 }
1504 }
1505
1506 /* We delay the generation of the _Unwind_Resume until we generate
1507 landing pads. We emit a marker here so as to get good control
1508 flow data in the meantime. */
1509 region->resume
1510 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1511 emit_barrier ();
1512
1513 seq = get_insns ();
1514 end_sequence ();
1515
1516 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1517
1518 break;
1519
1520 case ERT_ALLOWED_EXCEPTIONS:
1521 region->post_landing_pad = gen_label_rtx ();
1522
1523 start_sequence ();
1524
1525 emit_label (region->post_landing_pad);
1526
1527 emit_cmp_and_jump_insns (crtl->eh.filter,
1528 GEN_INT (region->u.allowed.filter),
1529 EQ, NULL_RTX,
1530 targetm.eh_return_filter_mode (), 0, region->label);
1531
1532 /* We delay the generation of the _Unwind_Resume until we generate
1533 landing pads. We emit a marker here so as to get good control
1534 flow data in the meantime. */
1535 region->resume
1536 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1537 emit_barrier ();
1538
1539 seq = get_insns ();
1540 end_sequence ();
1541
1542 emit_to_new_bb_before (seq, region->label);
1543 break;
1544
1545 case ERT_CLEANUP:
1546 case ERT_MUST_NOT_THROW:
1547 region->post_landing_pad = region->label;
1548 break;
1549
1550 case ERT_CATCH:
1551 case ERT_THROW:
1552 /* Nothing to do. */
1553 break;
1554
1555 default:
1556 gcc_unreachable ();
1557 }
1558 }
1559 }
1560
1561 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1562 _Unwind_Resume otherwise. */
1563
1564 static void
1565 connect_post_landing_pads (void)
1566 {
1567 int i;
1568
1569 for (i = cfun->eh->last_region_number; i > 0; --i)
1570 {
1571 struct eh_region *region;
1572 struct eh_region *outer;
1573 rtx seq;
1574 rtx barrier;
1575
1576 region = VEC_index (eh_region, cfun->eh->region_array, i);
1577 /* Mind we don't process a region more than once. */
1578 if (!region || region->region_number != i)
1579 continue;
1580
1581 /* If there is no RESX, or it has been deleted by flow, there's
1582 nothing to fix up. */
1583 if (! region->resume || INSN_DELETED_P (region->resume))
1584 continue;
1585
1586 /* Search for another landing pad in this function. */
1587 for (outer = region->outer; outer ; outer = outer->outer)
1588 if (outer->post_landing_pad)
1589 break;
1590
1591 start_sequence ();
1592
1593 if (outer)
1594 {
1595 edge e;
1596 basic_block src, dest;
1597
1598 emit_jump (outer->post_landing_pad);
1599 src = BLOCK_FOR_INSN (region->resume);
1600 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1601 while (EDGE_COUNT (src->succs) > 0)
1602 remove_edge (EDGE_SUCC (src, 0));
1603 e = make_edge (src, dest, 0);
1604 e->probability = REG_BR_PROB_BASE;
1605 e->count = src->count;
1606 }
1607 else
1608 {
1609 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1610 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
1611
1612 /* What we just emitted was a throwing libcall, so it got a
1613 barrier automatically added after it. If the last insn in
1614 the libcall sequence isn't the barrier, it's because the
1615 target emits multiple insns for a call, and there are insns
1616 after the actual call insn (which are redundant and would be
1617 optimized away). The barrier is inserted exactly after the
1618 call insn, so let's go get that and delete the insns after
1619 it, because below we need the barrier to be the last insn in
1620 the sequence. */
1621 delete_insns_since (NEXT_INSN (last_call_insn ()));
1622 }
1623
1624 seq = get_insns ();
1625 end_sequence ();
1626 barrier = emit_insn_before (seq, region->resume);
1627 /* Avoid duplicate barrier. */
1628 gcc_assert (BARRIER_P (barrier));
1629 delete_insn (barrier);
1630 delete_insn (region->resume);
1631
1632 /* ??? From tree-ssa we can wind up with catch regions whose
1633 label is not instantiated, but whose resx is present. Now
1634 that we've dealt with the resx, kill the region. */
1635 if (region->label == NULL && region->type == ERT_CLEANUP)
1636 remove_eh_handler (region);
1637 }
1638 }
1639
1640 \f
1641 static void
1642 dw2_build_landing_pads (void)
1643 {
1644 int i;
1645
1646 for (i = cfun->eh->last_region_number; i > 0; --i)
1647 {
1648 struct eh_region *region;
1649 rtx seq;
1650 basic_block bb;
1651 edge e;
1652
1653 region = VEC_index (eh_region, cfun->eh->region_array, i);
1654 /* Mind we don't process a region more than once. */
1655 if (!region || region->region_number != i)
1656 continue;
1657
1658 if (region->type != ERT_CLEANUP
1659 && region->type != ERT_TRY
1660 && region->type != ERT_ALLOWED_EXCEPTIONS)
1661 continue;
1662
1663 start_sequence ();
1664
1665 region->landing_pad = gen_label_rtx ();
1666 emit_label (region->landing_pad);
1667
1668 #ifdef HAVE_exception_receiver
1669 if (HAVE_exception_receiver)
1670 emit_insn (gen_exception_receiver ());
1671 else
1672 #endif
1673 #ifdef HAVE_nonlocal_goto_receiver
1674 if (HAVE_nonlocal_goto_receiver)
1675 emit_insn (gen_nonlocal_goto_receiver ());
1676 else
1677 #endif
1678 { /* Nothing */ }
1679
1680 emit_move_insn (crtl->eh.exc_ptr,
1681 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1682 emit_move_insn (crtl->eh.filter,
1683 gen_rtx_REG (targetm.eh_return_filter_mode (),
1684 EH_RETURN_DATA_REGNO (1)));
1685
1686 seq = get_insns ();
1687 end_sequence ();
1688
1689 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1690 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1691 e->count = bb->count;
1692 e->probability = REG_BR_PROB_BASE;
1693 }
1694 }
1695
1696 \f
1697 struct sjlj_lp_info
1698 {
1699 int directly_reachable;
1700 int action_index;
1701 int dispatch_index;
1702 int call_site_index;
1703 };
1704
1705 static bool
1706 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1707 {
1708 rtx insn;
1709 bool found_one = false;
1710
1711 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1712 {
1713 struct eh_region *region;
1714 enum reachable_code rc;
1715 tree type_thrown;
1716 rtx note;
1717
1718 if (! INSN_P (insn))
1719 continue;
1720
1721 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1722 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1723 continue;
1724
1725 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1726 if (!region)
1727 continue;
1728
1729 type_thrown = NULL_TREE;
1730 if (region->type == ERT_THROW)
1731 {
1732 type_thrown = region->u.eh_throw.type;
1733 region = region->outer;
1734 }
1735
1736 /* Find the first containing region that might handle the exception.
1737 That's the landing pad to which we will transfer control. */
1738 rc = RNL_NOT_CAUGHT;
1739 for (; region; region = region->outer)
1740 {
1741 rc = reachable_next_level (region, type_thrown, NULL, false);
1742 if (rc != RNL_NOT_CAUGHT)
1743 break;
1744 }
1745 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1746 {
1747 lp_info[region->region_number].directly_reachable = 1;
1748 found_one = true;
1749 }
1750 }
1751
1752 return found_one;
1753 }
1754
1755 static void
1756 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1757 {
1758 htab_t ar_hash;
1759 int i, index;
1760
1761 /* First task: build the action table. */
1762
1763 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1764 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1765
1766 for (i = cfun->eh->last_region_number; i > 0; --i)
1767 if (lp_info[i].directly_reachable)
1768 {
1769 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1770
1771 r->landing_pad = dispatch_label;
1772 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1773 if (lp_info[i].action_index != -1)
1774 crtl->uses_eh_lsda = 1;
1775 }
1776
1777 htab_delete (ar_hash);
1778
1779 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1780 landing pad label for the region. For sjlj though, there is one
1781 common landing pad from which we dispatch to the post-landing pads.
1782
1783 A region receives a dispatch index if it is directly reachable
1784 and requires in-function processing. Regions that share post-landing
1785 pads may share dispatch indices. */
1786 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1787 (see build_post_landing_pads) so we don't bother checking for it. */
1788
1789 index = 0;
1790 for (i = cfun->eh->last_region_number; i > 0; --i)
1791 if (lp_info[i].directly_reachable)
1792 lp_info[i].dispatch_index = index++;
1793
/* Finally: assign call-site values. In dwarf2 terms, this would be
the region number assigned by convert_to_eh_region_ranges, but it
handles no-action and must-not-throw differently. */
1797
1798 call_site_base = 1;
1799 for (i = cfun->eh->last_region_number; i > 0; --i)
1800 if (lp_info[i].directly_reachable)
1801 {
1802 int action = lp_info[i].action_index;
1803
1804 /* Map must-not-throw to otherwise unused call-site index 0. */
1805 if (action == -2)
1806 index = 0;
1807 /* Map no-action to otherwise unused call-site index -1. */
1808 else if (action == -1)
1809 index = -1;
1810 /* Otherwise, look it up in the table. */
1811 else
1812 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1813
1814 lp_info[i].call_site_index = index;
1815 }
1816 }
1817
1818 static void
1819 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1820 {
1821 int last_call_site = -2;
1822 rtx insn, mem;
1823
1824 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1825 {
1826 struct eh_region *region;
1827 int this_call_site;
1828 rtx note, before, p;
1829
1830 /* Reset value tracking at extended basic block boundaries. */
1831 if (LABEL_P (insn))
1832 last_call_site = -2;
1833
1834 if (! INSN_P (insn))
1835 continue;
1836
1837 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1838
1839 /* Calls that are known to not throw need not be marked. */
1840 if (note && INTVAL (XEXP (note, 0)) <= 0)
1841 continue;
1842
1843 if (note)
1844 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1845 else
1846 region = NULL;
1847
1848 if (!region)
1849 {
1850 /* Calls (and trapping insns) without notes are outside any
1851 exception handling region in this function. Mark them as
1852 no action. */
1853 if (CALL_P (insn)
1854 || (flag_non_call_exceptions
1855 && may_trap_p (PATTERN (insn))))
1856 this_call_site = -1;
1857 else
1858 continue;
1859 }
1860 else
1861 this_call_site = lp_info[region->region_number].call_site_index;
1862
1863 if (this_call_site == last_call_site)
1864 continue;
1865
/* Don't separate a call from its argument loads. */
1867 before = insn;
1868 if (CALL_P (insn))
1869 before = find_first_parameter_load (insn, NULL_RTX);
1870
1871 start_sequence ();
1872 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1873 sjlj_fc_call_site_ofs);
1874 emit_move_insn (mem, GEN_INT (this_call_site));
1875 p = get_insns ();
1876 end_sequence ();
1877
1878 emit_insn_before (p, before);
1879 last_call_site = this_call_site;
1880 }
1881 }
1882
1883 /* Construct the SjLj_Function_Context. */
1884
1885 static void
1886 sjlj_emit_function_enter (rtx dispatch_label)
1887 {
1888 rtx fn_begin, fc, mem, seq;
1889 bool fn_begin_outside_block;
1890
1891 fc = crtl->eh.sjlj_fc;
1892
1893 start_sequence ();
1894
/* We're storing this libcall's address into memory instead of
calling it directly. Thus, we must call assemble_external_libcall
here, as we cannot depend on emit_library_call to do it for us. */
1898 assemble_external_libcall (eh_personality_libfunc);
1899 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1900 emit_move_insn (mem, eh_personality_libfunc);
1901
1902 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1903 if (crtl->uses_eh_lsda)
1904 {
1905 char buf[20];
1906 rtx sym;
1907
1908 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1909 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1910 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1911 emit_move_insn (mem, sym);
1912 }
1913 else
1914 emit_move_insn (mem, const0_rtx);
1915
1916 #ifdef DONT_USE_BUILTIN_SETJMP
1917 {
1918 rtx x;
1919 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1920 TYPE_MODE (integer_type_node), 1,
1921 plus_constant (XEXP (fc, 0),
1922 sjlj_fc_jbuf_ofs), Pmode);
1923
1924 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1925 TYPE_MODE (integer_type_node), 0, dispatch_label);
1926 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
1927 }
1928 #else
1929 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1930 dispatch_label);
1931 #endif
1932
1933 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1934 1, XEXP (fc, 0), Pmode);
1935
1936 seq = get_insns ();
1937 end_sequence ();
1938
1939 /* ??? Instead of doing this at the beginning of the function,
1940 do this in a block that is at loop level 0 and dominates all
1941 can_throw_internal instructions. */
1942
1943 fn_begin_outside_block = true;
1944 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1945 if (NOTE_P (fn_begin))
1946 {
1947 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1948 break;
1949 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1950 fn_begin_outside_block = false;
1951 }
1952
1953 if (fn_begin_outside_block)
1954 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
1955 else
1956 emit_insn_after (seq, fn_begin);
1957 }
1958
1959 /* Call back from expand_function_end to know where we should put
1960 the call to unwind_sjlj_unregister_libfunc if needed. */
1961
1962 void
1963 sjlj_emit_function_exit_after (rtx after)
1964 {
1965 crtl->eh.sjlj_exit_after = after;
1966 }
1967
1968 static void
1969 sjlj_emit_function_exit (void)
1970 {
1971 rtx seq;
1972 edge e;
1973 edge_iterator ei;
1974
1975 start_sequence ();
1976
1977 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1978 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1979
1980 seq = get_insns ();
1981 end_sequence ();
1982
1983 /* ??? Really this can be done in any block at loop level 0 that
1984 post-dominates all can_throw_internal instructions. This is
1985 the last possible moment. */
1986
1987 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1988 if (e->flags & EDGE_FALLTHRU)
1989 break;
1990 if (e)
1991 {
1992 rtx insn;
1993
1994 /* Figure out whether the place where we are supposed to insert the
1995 libcall is inside the last basic block or after it.  In the latter
1996 case we need to emit it on the edge. */
1997 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
1998 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
1999 {
2000 if (insn == crtl->eh.sjlj_exit_after)
2001 {
2002 if (LABEL_P (insn))
2003 insn = NEXT_INSN (insn);
2004 emit_insn_after (seq, insn);
2005 return;
2006 }
2007 if (insn == BB_END (e->src))
2008 break;
2009 }
2010 insert_insn_on_edge (seq, e);
2011 }
2012 }
2013
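/* Emit the dispatch code executed when the runtime longjmps back into
   this function: reload the call-site index, exception pointer and
   filter value from the function context, then compare the index
   against each directly reachable region in turn and jump to its
   post-landing pad, with one region's pad serving as the fall-through
   default.  */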
2014 static void
2015 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2016 {
2017 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2018 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2019 int i, first_reachable;
2020 rtx mem, dispatch, seq, fc;
2021 rtx before;
2022 basic_block bb;
2023 edge e;
2024
2025 fc = crtl->eh.sjlj_fc;
2026
2027 start_sequence ();
2028
2029 emit_label (dispatch_label);
2030
2031 #ifndef DONT_USE_BUILTIN_SETJMP
2032 expand_builtin_setjmp_receiver (dispatch_label);
2033 #endif
2034
2035 /* Load up dispatch index, exc_ptr and filter values from the
2036 function context. */
2037 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2038 sjlj_fc_call_site_ofs);
2039 dispatch = copy_to_reg (mem);
2040
2041 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2042 if (unwind_word_mode != ptr_mode)
2043 {
2044 #ifdef POINTERS_EXTEND_UNSIGNED
2045 mem = convert_memory_address (ptr_mode, mem);
2046 #else
2047 mem = convert_to_mode (ptr_mode, mem, 0);
2048 #endif
2049 }
2050 emit_move_insn (crtl->eh.exc_ptr, mem);
2051
2052 mem = adjust_address (fc, unwind_word_mode,
2053 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2054 if (unwind_word_mode != filter_mode)
2055 mem = convert_to_mode (filter_mode, mem, 0);
2056 emit_move_insn (crtl->eh.filter, mem);
2057
2058 /* Jump to one of the directly reachable regions. */
2059 /* ??? This really ought to be using a switch statement. */
2060
2061 first_reachable = 0;
2062 for (i = cfun->eh->last_region_number; i > 0; --i)
2063 {
2064 if (! lp_info[i].directly_reachable)
2065 continue;
2066
2067 if (! first_reachable)
2068 {
2069 first_reachable = i;
2070 continue;
2071 }
2072
2073 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2074 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2075 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2076 ->post_landing_pad);
2077 }
2078
2079 seq = get_insns ();
2080 end_sequence ();
2081
2082 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2083 ->post_landing_pad);
2084
2085 bb = emit_to_new_bb_before (seq, before);
2086 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2087 e->count = bb->count;
2088 e->probability = REG_BR_PROB_BASE;
2089 }
2090
2091 static void
2092 sjlj_build_landing_pads (void)
2093 {
2094 struct sjlj_lp_info *lp_info;
2095
2096 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2097
2098 if (sjlj_find_directly_reachable_regions (lp_info))
2099 {
2100 rtx dispatch_label = gen_label_rtx ();
2101 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2102 TYPE_MODE (sjlj_fc_type_node),
2103 TYPE_ALIGN (sjlj_fc_type_node));
2104 crtl->eh.sjlj_fc
2105 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2106 int_size_in_bytes (sjlj_fc_type_node),
2107 align);
2108
2109 sjlj_assign_call_site_values (dispatch_label, lp_info);
2110 sjlj_mark_call_sites (lp_info);
2111
2112 sjlj_emit_function_enter (dispatch_label);
2113 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2114 sjlj_emit_function_exit ();
2115 }
2116
2117 free (lp_info);
2118 }
2119
2120 void
2121 finish_eh_generation (void)
2122 {
2123 basic_block bb;
2124
2125 /* Nothing to do if no regions created. */
2126 if (cfun->eh->region_tree == NULL)
2127 return;
2128
2129 /* The object here is to provide find_basic_blocks with detailed
2130 information (via reachable_handlers) on how exception control
2131 flows within the function. In this first pass, we can include
2132 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2133 regions, and hope that it will be useful in deleting unreachable
2134 handlers. Subsequently, we will generate landing pads which will
2135 connect many of the handlers, and then type information will not
2136 be effective. Still, this is a win over previous implementations. */
2137
2138 /* These registers are used by the landing pads. Make sure they
2139 have been generated. */
2140 get_exception_pointer ();
2141 get_exception_filter ();
2142
2143 /* Construct the landing pads. */
2144
2145 assign_filter_values ();
2146 build_post_landing_pads ();
2147 connect_post_landing_pads ();
2148 if (USING_SJLJ_EXCEPTIONS)
2149 sjlj_build_landing_pads ();
2150 else
2151 dw2_build_landing_pads ();
2152
2153 crtl->eh.built_landing_pads = 1;
2154
2155 /* We've totally changed the CFG. Start over. */
2156 find_exception_handler_labels ();
2157 break_superblocks ();
2158 if (USING_SJLJ_EXCEPTIONS
2159 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2160 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2161 commit_edge_insertions ();
2162 FOR_EACH_BB (bb)
2163 {
2164 edge e;
2165 edge_iterator ei;
2166 bool eh = false;
2167 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2168 {
2169 if (e->flags & EDGE_EH)
2170 {
2171 remove_edge (e);
2172 eh = true;
2173 }
2174 else
2175 ei_next (&ei);
2176 }
2177 if (eh)
2178 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2179 }
2180 }
2181 \f
2182 /* This section handles removing dead code for flow. */
2183
2184 /* Splice REGION from the region tree and replace it by REPLACE etc. */
2185
2186 static void
2187 remove_eh_handler_and_replace (struct eh_region *region,
2188 struct eh_region *replace)
2189 {
2190 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2191 rtx lab;
2192
2193 outer = region->outer;
2194 /* For the benefit of efficiently handling REG_EH_REGION notes,
2195 replace this region in the region array with its containing
2196 region. Note that previous region deletions may result in
2197 multiple copies of this region in the array, so we have a
2198 list of alternate numbers by which we are known. */
2199
2200 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2201 replace);
2202 if (region->aka)
2203 {
2204 unsigned i;
2205 bitmap_iterator bi;
2206
2207 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2208 {
2209 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2210 }
2211 }
2212
2213 if (replace)
2214 {
2215 if (!replace->aka)
2216 replace->aka = BITMAP_GGC_ALLOC ();
2217 if (region->aka)
2218 bitmap_ior_into (replace->aka, region->aka);
2219 bitmap_set_bit (replace->aka, region->region_number);
2220 }
2221
2222 if (crtl->eh.built_landing_pads)
2223 lab = region->landing_pad;
2224 else
2225 lab = region->label;
2226 if (outer)
2227 pp_start = &outer->inner;
2228 else
2229 pp_start = &cfun->eh->region_tree;
2230 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2231 continue;
2232 *pp = region->next_peer;
2233
2234 if (replace)
2235 pp_start = &replace->inner;
2236 else
2237 pp_start = &cfun->eh->region_tree;
2238 inner = region->inner;
2239 if (inner)
2240 {
2241 for (p = inner; p->next_peer ; p = p->next_peer)
2242 p->outer = replace;
2243 p->outer = replace;
2244
2245 p->next_peer = *pp_start;
2246 *pp_start = inner;
2247 }
2248
2249 if (region->type == ERT_CATCH)
2250 {
2251 struct eh_region *eh_try, *next, *prev;
2252
2253 for (eh_try = region->next_peer;
2254 eh_try->type == ERT_CATCH;
2255 eh_try = eh_try->next_peer)
2256 continue;
2257 gcc_assert (eh_try->type == ERT_TRY);
2258
2259 next = region->u.eh_catch.next_catch;
2260 prev = region->u.eh_catch.prev_catch;
2261
2262 if (next)
2263 next->u.eh_catch.prev_catch = prev;
2264 else
2265 eh_try->u.eh_try.last_catch = prev;
2266 if (prev)
2267 prev->u.eh_catch.next_catch = next;
2268 else
2269 {
2270 eh_try->u.eh_try.eh_catch = next;
2271 if (! next)
2272 remove_eh_handler (eh_try);
2273 }
2274 }
2275 }
2276
2277 /* Splice REGION from the region tree and replace it by the outer region
2278 etc. */
2279
2280 static void
2281 remove_eh_handler (struct eh_region *region)
2282 {
2283 remove_eh_handler_and_replace (region, region->outer);
2284 }
2285
2286 /* Remove EH region R that has turned out to have no code in its handler. */
2287
2288 void
2289 remove_eh_region (int r)
2290 {
2291 struct eh_region *region;
2292
2293 region = VEC_index (eh_region, cfun->eh->region_array, r);
2294 remove_eh_handler (region);
2295 }
2296
2297 /* Invokes CALLBACK for every exception handler label. Only used by old
2298 loop hackery; should not be used by new code. */
2299
2300 void
2301 for_each_eh_label (void (*callback) (rtx))
2302 {
2303 int i;
2304 for (i = 0; i < cfun->eh->last_region_number; i++)
2305 {
2306 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2307 if (r && r->region_number == i && r->label
2308 && GET_CODE (r->label) == CODE_LABEL)
2309 (*callback) (r->label);
2310 }
2311 }
2312
2313 /* Invoke CALLBACK for every exception region in the current function. */
2314
2315 void
2316 for_each_eh_region (void (*callback) (struct eh_region *))
2317 {
2318 int i, n = cfun->eh->last_region_number;
2319 for (i = 1; i <= n; ++i)
2320 {
2321 struct eh_region *region;
2322
2323 region = VEC_index (eh_region, cfun->eh->region_array, i);
2324 if (region)
2325 (*callback) (region);
2326 }
2327 }
2328 \f
2329 /* This section describes CFG exception edges for flow. */
2330
2331 /* For communicating between calls to reachable_next_level. */
2332 struct reachable_info
2333 {
2334 tree types_caught;
2335 tree types_allowed;
2336 void (*callback) (struct eh_region *, void *);
2337 void *callback_data;
2338 };
2339
2340 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2341 base class of TYPE, is in HANDLED. */
2342
2343 static int
2344 check_handled (tree handled, tree type)
2345 {
2346 tree t;
2347
2348 /* We can check for exact matches without front-end help. */
2349 if (! lang_eh_type_covers)
2350 {
2351 for (t = handled; t ; t = TREE_CHAIN (t))
2352 if (TREE_VALUE (t) == type)
2353 return 1;
2354 }
2355 else
2356 {
2357 for (t = handled; t ; t = TREE_CHAIN (t))
2358 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2359 return 1;
2360 }
2361
2362 return 0;
2363 }
2364
2365 /* A subroutine of reachable_next_level. If we are collecting a list
2366 of handlers, add one. After landing pad generation, reference
2367 it instead of the handlers themselves. Further, the handlers are
2368 all wired together, so by referencing one, we've got them all.
2369 Before landing pad generation we reference each handler individually.
2370
2371 LP_REGION contains the landing pad; REGION is the handler. */
2372
2373 static void
2374 add_reachable_handler (struct reachable_info *info,
2375 struct eh_region *lp_region, struct eh_region *region)
2376 {
2377 if (! info)
2378 return;
2379
2380 if (crtl->eh.built_landing_pads)
2381 info->callback (lp_region, info->callback_data);
2382 else
2383 info->callback (region, info->callback_data);
2384 }
2385
2386 /* Process one level of exception regions for reachability.
2387 If TYPE_THROWN is non-null, then it is the *exact* type being
2388 propagated. If INFO is non-null, then collect handler labels
2389 and caught/allowed type information between invocations. */
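/* The result ranks how definitively a region disposes of the exception:
   RNL_NOT_CAUGHT continues the outward walk, RNL_MAYBE_CAUGHT records a
   possible handler, RNL_CAUGHT means the exception is definitely handled
   here, and RNL_BLOCKED means nothing propagates past this point.  The
   callers below generally stop walking outward once the result is
   RNL_CAUGHT or above.  */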
2390
2391 static enum reachable_code
2392 reachable_next_level (struct eh_region *region, tree type_thrown,
2393 struct reachable_info *info,
2394 bool maybe_resx)
2395 {
2396 switch (region->type)
2397 {
2398 case ERT_CLEANUP:
2399 /* Before landing-pad generation, we model control flow
2400 directly to the individual handlers. In this way we can
2401 see that catch handler types may shadow one another. */
2402 add_reachable_handler (info, region, region);
2403 return RNL_MAYBE_CAUGHT;
2404
2405 case ERT_TRY:
2406 {
2407 struct eh_region *c;
2408 enum reachable_code ret = RNL_NOT_CAUGHT;
2409
2410 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2411 {
2412 /* A catch-all handler ends the search. */
2413 if (c->u.eh_catch.type_list == NULL)
2414 {
2415 add_reachable_handler (info, region, c);
2416 return RNL_CAUGHT;
2417 }
2418
2419 if (type_thrown)
2420 {
2421 /* If we have at least one type match, end the search. */
2422 tree tp_node = c->u.eh_catch.type_list;
2423
2424 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2425 {
2426 tree type = TREE_VALUE (tp_node);
2427
2428 if (type == type_thrown
2429 || (lang_eh_type_covers
2430 && (*lang_eh_type_covers) (type, type_thrown)))
2431 {
2432 add_reachable_handler (info, region, c);
2433 return RNL_CAUGHT;
2434 }
2435 }
2436
2437 /* If we have definitive information of a match failure,
2438 the catch won't trigger. */
2439 if (lang_eh_type_covers)
2440 return RNL_NOT_CAUGHT;
2441 }
2442
2443 /* At this point, we either don't know what type is thrown or
2444 don't have front-end assistance to help deciding if it is
2445 covered by one of the types in the list for this region.
2446
2447 We'd then like to add this region to the list of reachable
2448 handlers since it is indeed potentially reachable based on the
2449 information we have.
2450
2451 Actually, this handler is for sure not reachable if all the
2452 types it matches have already been caught. That is, it is only
2453 potentially reachable if at least one of the types it catches
2454 has not been previously caught. */
2455
2456 if (! info)
2457 ret = RNL_MAYBE_CAUGHT;
2458 else
2459 {
2460 tree tp_node = c->u.eh_catch.type_list;
2461 bool maybe_reachable = false;
2462
2463 /* Compute the potential reachability of this handler and
2464 update the list of types caught at the same time. */
2465 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2466 {
2467 tree type = TREE_VALUE (tp_node);
2468
2469 if (! check_handled (info->types_caught, type))
2470 {
2471 info->types_caught
2472 = tree_cons (NULL, type, info->types_caught);
2473
2474 maybe_reachable = true;
2475 }
2476 }
2477
2478 if (maybe_reachable)
2479 {
2480 add_reachable_handler (info, region, c);
2481
2482 /* ??? If the catch type is a base class of every allowed
2483 type, then we know we can stop the search. */
2484 ret = RNL_MAYBE_CAUGHT;
2485 }
2486 }
2487 }
2488
2489 return ret;
2490 }
2491
2492 case ERT_ALLOWED_EXCEPTIONS:
2493 /* An empty list of types definitely ends the search. */
2494 if (region->u.allowed.type_list == NULL_TREE)
2495 {
2496 add_reachable_handler (info, region, region);
2497 return RNL_CAUGHT;
2498 }
2499
2500 /* Collect a list of lists of allowed types for use in detecting
2501 when a catch may be transformed into a catch-all. */
2502 if (info)
2503 info->types_allowed = tree_cons (NULL_TREE,
2504 region->u.allowed.type_list,
2505 info->types_allowed);
2506
2507 /* If we have definitive information about the type hierarchy,
2508 then we can tell if the thrown type will pass through the
2509 filter. */
2510 if (type_thrown && lang_eh_type_covers)
2511 {
2512 if (check_handled (region->u.allowed.type_list, type_thrown))
2513 return RNL_NOT_CAUGHT;
2514 else
2515 {
2516 add_reachable_handler (info, region, region);
2517 return RNL_CAUGHT;
2518 }
2519 }
2520
2521 add_reachable_handler (info, region, region);
2522 return RNL_MAYBE_CAUGHT;
2523
2524 case ERT_CATCH:
2525 /* Catch regions are handled by their controlling try region. */
2526 return RNL_NOT_CAUGHT;
2527
2528 case ERT_MUST_NOT_THROW:
2529 /* Here we end our search, since no exceptions may propagate.
2530
2531 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2532 only via locally handled RESX instructions.
2533
2534 When we inline a function call, we can bring in new handlers.  In order
2535 to keep ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
2536 assume that such handlers exist for any inlinable call until
2537 inlining decisions are fixed. */
2538
2539 if (maybe_resx)
2540 {
2541 add_reachable_handler (info, region, region);
2542 return RNL_CAUGHT;
2543 }
2544 else
2545 return RNL_BLOCKED;
2546
2547 case ERT_THROW:
2548 case ERT_UNKNOWN:
2549 /* Shouldn't see these here. */
2550 gcc_unreachable ();
2551 break;
2552 default:
2553 gcc_unreachable ();
2554 }
2555 }
2556
2557 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2558
2559 void
2560 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2561 void (*callback) (struct eh_region *, void *),
2562 void *callback_data)
2563 {
2564 struct reachable_info info;
2565 struct eh_region *region;
2566 tree type_thrown;
2567
2568 memset (&info, 0, sizeof (info));
2569 info.callback = callback;
2570 info.callback_data = callback_data;
2571
2572 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2573 if (!region)
2574 return;
2575
2576 type_thrown = NULL_TREE;
2577 if (is_resx)
2578 {
2579 /* A RESX leaves a region instead of entering it. Thus the
2580 region itself may have been deleted out from under us. */
2581 if (region == NULL)
2582 return;
2583 region = region->outer;
2584 }
2585 else if (region->type == ERT_THROW)
2586 {
2587 type_thrown = region->u.eh_throw.type;
2588 region = region->outer;
2589 }
2590
2591 while (region)
2592 {
2593 if (reachable_next_level (region, type_thrown, &info,
2594 inlinable_call || is_resx) >= RNL_CAUGHT)
2595 break;
2596 /* If we have processed one cleanup, there is no point in
2597 processing any more of them. Each cleanup will have an edge
2598 to the next outer cleanup region, so the flow graph will be
2599 accurate. */
2600 if (region->type == ERT_CLEANUP)
2601 region = region->u.cleanup.prev_try;
2602 else
2603 region = region->outer;
2604 }
2605 }
2606
2607 /* Retrieve a list of labels of exception handlers which can be
2608 reached by a given insn. */
2609
2610 static void
2611 arh_to_landing_pad (struct eh_region *region, void *data)
2612 {
2613 rtx *p_handlers = (rtx *) data;
2614 if (! *p_handlers)
2615 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2616 }
2617
2618 static void
2619 arh_to_label (struct eh_region *region, void *data)
2620 {
2621 rtx *p_handlers = (rtx *) data;
2622 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2623 }
2624
2625 rtx
2626 reachable_handlers (rtx insn)
2627 {
2628 bool is_resx = false;
2629 rtx handlers = NULL;
2630 int region_number;
2631
2632 if (JUMP_P (insn)
2633 && GET_CODE (PATTERN (insn)) == RESX)
2634 {
2635 region_number = XINT (PATTERN (insn), 0);
2636 is_resx = true;
2637 }
2638 else
2639 {
2640 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2641 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2642 return NULL;
2643 region_number = INTVAL (XEXP (note, 0));
2644 }
2645
2646 foreach_reachable_handler (region_number, is_resx, false,
2647 (crtl->eh.built_landing_pads
2648 ? arh_to_landing_pad
2649 : arh_to_label),
2650 &handlers);
2651
2652 return handlers;
2653 }
2654
2655 /* Determine if the given INSN can throw an exception that is caught
2656 within the function. */
2657
2658 bool
2659 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2660 {
2661 struct eh_region *region;
2662 tree type_thrown;
2663
2664 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2665 if (!region)
2666 return false;
2667
2668 type_thrown = NULL_TREE;
2669 if (is_resx)
2670 region = region->outer;
2671 else if (region->type == ERT_THROW)
2672 {
2673 type_thrown = region->u.eh_throw.type;
2674 region = region->outer;
2675 }
2676
2677 /* If this exception is ignored by each and every containing region,
2678 then control passes straight out. The runtime may handle some
2679 regions, which also do not require processing internally. */
2680 for (; region; region = region->outer)
2681 {
2682 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2683 inlinable_call || is_resx);
2684 if (how == RNL_BLOCKED)
2685 return false;
2686 if (how != RNL_NOT_CAUGHT)
2687 return true;
2688 }
2689
2690 return false;
2691 }
2692
2693 bool
2694 can_throw_internal (const_rtx insn)
2695 {
2696 rtx note;
2697
2698 if (! INSN_P (insn))
2699 return false;
2700
2701 if (JUMP_P (insn)
2702 && GET_CODE (PATTERN (insn)) == RESX
2703 && XINT (PATTERN (insn), 0) > 0)
2704 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2705
2706 if (NONJUMP_INSN_P (insn)
2707 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2708 insn = XVECEXP (PATTERN (insn), 0, 0);
2709
2710 /* Every insn that might throw has an EH_REGION note. */
2711 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2712 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2713 return false;
2714
2715 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2716 }
2717
2718 /* Determine if the given INSN can throw an exception that is
2719 visible outside the function. */
2720
2721 bool
2722 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2723 {
2724 struct eh_region *region;
2725 tree type_thrown;
2726
2727 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2728 if (!region)
2729 return true;
2730
2731 type_thrown = NULL_TREE;
2732 if (is_resx)
2733 region = region->outer;
2734 else if (region->type == ERT_THROW)
2735 {
2736 type_thrown = region->u.eh_throw.type;
2737 region = region->outer;
2738 }
2739
2740 /* If the exception is caught or blocked by any containing region,
2741 then it is not seen by any calling function. */
2742 for (; region ; region = region->outer)
2743 if (reachable_next_level (region, type_thrown, NULL,
2744 inlinable_call || is_resx) >= RNL_CAUGHT)
2745 return false;
2746
2747 return true;
2748 }
2749
2750 bool
2751 can_throw_external (const_rtx insn)
2752 {
2753 rtx note;
2754
2755 if (! INSN_P (insn))
2756 return false;
2757
2758 if (JUMP_P (insn)
2759 && GET_CODE (PATTERN (insn)) == RESX
2760 && XINT (PATTERN (insn), 0) > 0)
2761 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2762
2763 if (NONJUMP_INSN_P (insn)
2764 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2765 {
2766 rtx seq = PATTERN (insn);
2767 int i, n = XVECLEN (seq, 0);
2768
2769 for (i = 0; i < n; i++)
2770 if (can_throw_external (XVECEXP (seq, 0, i)))
2771 return true;
2772
2773 return false;
2774 }
2775
2776 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2777 if (!note)
2778 {
2779 /* Calls (and trapping insns) without notes are outside any
2780 exception handling region in this function. We have to
2781 assume it might throw. Given that the front end and middle
2782 ends mark known NOTHROW functions, this isn't so wildly
2783 inaccurate. */
2784 return (CALL_P (insn)
2785 || (flag_non_call_exceptions
2786 && may_trap_p (PATTERN (insn))));
2787 }
2788 if (INTVAL (XEXP (note, 0)) <= 0)
2789 return false;
2790
2791 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2792 }
2793
2794 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2795
2796 unsigned int
2797 set_nothrow_function_flags (void)
2798 {
2799 rtx insn;
2800
2801 crtl->nothrow = 1;
2802
2803 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2804 something that can throw an exception. We specifically exempt
2805 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2806 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2807 is optimistic. */
2808
2809 crtl->all_throwers_are_sibcalls = 1;
2810
2811 /* If we don't know that this implementation of the function will
2812 actually be used, then we must not set TREE_NOTHROW, since
2813 callers must not assume that this function does not throw. */
2814 if (TREE_NOTHROW (current_function_decl))
2815 return 0;
2816
2817 if (! flag_exceptions)
2818 return 0;
2819
2820 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2821 if (can_throw_external (insn))
2822 {
2823 crtl->nothrow = 0;
2824
2825 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2826 {
2827 crtl->all_throwers_are_sibcalls = 0;
2828 return 0;
2829 }
2830 }
2831
2832 for (insn = crtl->epilogue_delay_list; insn;
2833 insn = XEXP (insn, 1))
2834 if (can_throw_external (insn))
2835 {
2836 crtl->nothrow = 0;
2837
2838 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2839 {
2840 crtl->all_throwers_are_sibcalls = 0;
2841 return 0;
2842 }
2843 }
2844 if (crtl->nothrow
2845 && (cgraph_function_body_availability (cgraph_node
2846 (current_function_decl))
2847 >= AVAIL_AVAILABLE))
2848 {
2849 struct cgraph_node *node = cgraph_node (current_function_decl);
2850 struct cgraph_edge *e;
2851 for (e = node->callers; e; e = e->next_caller)
2852 e->can_throw_external = false;
2853 TREE_NOTHROW (current_function_decl) = 1;
2854
2855 if (dump_file)
2856 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2857 current_function_name ());
2858 }
2859 return 0;
2860 }
2861
2862 struct rtl_opt_pass pass_set_nothrow_function_flags =
2863 {
2864 {
2865 RTL_PASS,
2866 "nothrow", /* name */
2867 NULL, /* gate */
2868 set_nothrow_function_flags, /* execute */
2869 NULL, /* sub */
2870 NULL, /* next */
2871 0, /* static_pass_number */
2872 TV_NONE, /* tv_id */
2873 0, /* properties_required */
2874 0, /* properties_provided */
2875 0, /* properties_destroyed */
2876 0, /* todo_flags_start */
2877 TODO_dump_func, /* todo_flags_finish */
2878 }
2879 };
2880
2881 \f
2882 /* Various hooks for unwind library. */
2883
2884 /* Do any necessary initialization to access arbitrary stack frames.
2885 On the SPARC, this means flushing the register windows. */
2886
2887 void
2888 expand_builtin_unwind_init (void)
2889 {
2890 /* Set this so all the registers get saved in our frame; we need to be
2891 able to copy the saved values for any registers from frames we unwind. */
2892 crtl->saves_all_registers = 1;
2893
2894 #ifdef SETUP_FRAME_ADDRESSES
2895 SETUP_FRAME_ADDRESSES ();
2896 #endif
2897 }
2898
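/* Expand a call to __builtin_eh_return_data_regno.  The constant
   argument selects one of the EH return data registers; translate it
   to the register number used in unwind/debug info, yielding -1 for
   an invalid or non-constant argument.  */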
2899 rtx
2900 expand_builtin_eh_return_data_regno (tree exp)
2901 {
2902 tree which = CALL_EXPR_ARG (exp, 0);
2903 unsigned HOST_WIDE_INT iwhich;
2904
2905 if (TREE_CODE (which) != INTEGER_CST)
2906 {
2907 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2908 return constm1_rtx;
2909 }
2910
2911 iwhich = tree_low_cst (which, 1);
2912 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2913 if (iwhich == INVALID_REGNUM)
2914 return constm1_rtx;
2915
2916 #ifdef DWARF_FRAME_REGNUM
2917 iwhich = DWARF_FRAME_REGNUM (iwhich);
2918 #else
2919 iwhich = DBX_REGISTER_NUMBER (iwhich);
2920 #endif
2921
2922 return GEN_INT (iwhich);
2923 }
2924
2925 /* Given a value extracted from the return address register or stack slot,
2926 return the actual address encoded in that value. */
2927
2928 rtx
2929 expand_builtin_extract_return_addr (tree addr_tree)
2930 {
2931 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2932
2933 if (GET_MODE (addr) != Pmode
2934 && GET_MODE (addr) != VOIDmode)
2935 {
2936 #ifdef POINTERS_EXTEND_UNSIGNED
2937 addr = convert_memory_address (Pmode, addr);
2938 #else
2939 addr = convert_to_mode (Pmode, addr, 0);
2940 #endif
2941 }
2942
2943 /* First mask out any unwanted bits. */
2944 #ifdef MASK_RETURN_ADDR
2945 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2946 #endif
2947
2948 /* Then adjust to find the real return address. */
2949 #if defined (RETURN_ADDR_OFFSET)
2950 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2951 #endif
2952
2953 return addr;
2954 }
2955
2956 /* Given an actual address in addr_tree, do any necessary encoding
2957 and return the value to be stored in the return address register or
2958 stack slot so the epilogue will return to that address. */
2959
2960 rtx
2961 expand_builtin_frob_return_addr (tree addr_tree)
2962 {
2963 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2964
2965 addr = convert_memory_address (Pmode, addr);
2966
2967 #ifdef RETURN_ADDR_OFFSET
2968 addr = force_reg (Pmode, addr);
2969 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2970 #endif
2971
2972 return addr;
2973 }
2974
2975 /* Set up the epilogue with the magic bits we'll need to return to the
2976 exception handler. */
2977
2978 void
2979 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2980 tree handler_tree)
2981 {
2982 rtx tmp;
2983
2984 #ifdef EH_RETURN_STACKADJ_RTX
2985 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2986 VOIDmode, EXPAND_NORMAL);
2987 tmp = convert_memory_address (Pmode, tmp);
2988 if (!crtl->eh.ehr_stackadj)
2989 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2990 else if (tmp != crtl->eh.ehr_stackadj)
2991 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2992 #endif
2993
2994 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2995 VOIDmode, EXPAND_NORMAL);
2996 tmp = convert_memory_address (Pmode, tmp);
2997 if (!crtl->eh.ehr_handler)
2998 crtl->eh.ehr_handler = copy_to_reg (tmp);
2999 else if (tmp != crtl->eh.ehr_handler)
3000 emit_move_insn (crtl->eh.ehr_handler, tmp);
3001
3002 if (!crtl->eh.ehr_label)
3003 crtl->eh.ehr_label = gen_label_rtx ();
3004 emit_jump (crtl->eh.ehr_label);
3005 }
3006
3007 void
3008 expand_eh_return (void)
3009 {
3010 rtx around_label;
3011
3012 if (! crtl->eh.ehr_label)
3013 return;
3014
3015 crtl->calls_eh_return = 1;
3016
3017 #ifdef EH_RETURN_STACKADJ_RTX
3018 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3019 #endif
3020
3021 around_label = gen_label_rtx ();
3022 emit_jump (around_label);
3023
3024 emit_label (crtl->eh.ehr_label);
3025 clobber_return_register ();
3026
3027 #ifdef EH_RETURN_STACKADJ_RTX
3028 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3029 #endif
3030
3031 #ifdef HAVE_eh_return
3032 if (HAVE_eh_return)
3033 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3034 else
3035 #endif
3036 {
3037 #ifdef EH_RETURN_HANDLER_RTX
3038 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3039 #else
3040 error ("__builtin_eh_return not supported on this target");
3041 #endif
3042 }
3043
3044 emit_label (around_label);
3045 }
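/* A sketch of how the pieces above fit together (illustrative; this is
   the way libgcc's DWARF unwinder installs a context): the unwinder
   executes

     __builtin_eh_return (stack_adjustment, handler_address);

   which expand_builtin_eh_return turns into copies into the ehr_stackadj
   and ehr_handler registers plus a jump to ehr_label, and expand_eh_return
   emits the code at that label which performs the adjusted return into
   the landing pad.  */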
3046
3047 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3048 POINTERS_EXTEND_UNSIGNED and return it. */
3049
3050 rtx
3051 expand_builtin_extend_pointer (tree addr_tree)
3052 {
3053 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3054 int extend;
3055
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 extend = POINTERS_EXTEND_UNSIGNED;
3058 #else
3059 /* The previous EH code did an unsigned extend by default, so we do this also
3060 for consistency. */
3061 extend = 1;
3062 #endif
3063
3064 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3065 }
3066 \f
3067 /* In the following functions, we represent entries in the action table
3068 as 1-based indices. Special cases are:
3069
3070 0: null action record, non-null landing pad; implies cleanups
3071 -1: null action record, null landing pad; implies no action
3072 -2: no call-site entry; implies must_not_throw
3073 -3: we have yet to process outer regions
3074
3075 Further, no special cases apply to the "next" field of the record.
3076 For next, 0 means end of list. */
3077
3078 struct action_record
3079 {
3080 int offset;
3081 int filter;
3082 int next;
3083 };
3084
3085 static int
3086 action_record_eq (const void *pentry, const void *pdata)
3087 {
3088 const struct action_record *entry = (const struct action_record *) pentry;
3089 const struct action_record *data = (const struct action_record *) pdata;
3090 return entry->filter == data->filter && entry->next == data->next;
3091 }
3092
3093 static hashval_t
3094 action_record_hash (const void *pentry)
3095 {
3096 const struct action_record *entry = (const struct action_record *) pentry;
3097 return entry->next * 1009 + entry->filter;
3098 }
3099
3100 static int
3101 add_action_record (htab_t ar_hash, int filter, int next)
3102 {
3103 struct action_record **slot, *new_ar, tmp;
3104
3105 tmp.filter = filter;
3106 tmp.next = next;
3107 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3108
3109 if ((new_ar = *slot) == NULL)
3110 {
3111 new_ar = XNEW (struct action_record);
3112 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3113 new_ar->filter = filter;
3114 new_ar->next = next;
3115 *slot = new_ar;
3116
3117 /* The filter value goes in untouched. The link to the next
3118 record is a "self-relative" byte offset, or zero to indicate
3119 that there is no next record. So convert the absolute 1-based
3120 indices we've been carrying around into a displacement. */
3121
3122 push_sleb128 (&crtl->eh.action_record_data, filter);
3123 if (next)
3124 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3125 push_sleb128 (&crtl->eh.action_record_data, next);
3126 }
3127
3128 return new_ar->offset;
3129 }
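/* For example, starting from an empty action_record_data array,
   add_action_record (h, 1, 0) appends the bytes 01 00 (filter 1, no
   next record) and returns offset 1; a following
   add_action_record (h, 2, 1) appends 02 7d, where 7d is sleb128 (-3),
   the self-relative displacement from varray position 4 back to the
   record at offset 1, and returns offset 3.  */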
3130
3131 static int
3132 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3133 {
3134 struct eh_region *c;
3135 int next;
3136
3137 /* If we've reached the top of the region chain, then we have
3138 no actions, and require no landing pad. */
3139 if (region == NULL)
3140 return -1;
3141
3142 switch (region->type)
3143 {
3144 case ERT_CLEANUP:
3145 /* A cleanup adds a zero filter to the beginning of the chain, but
3146 there are special cases to look out for. If there are *only*
3147 cleanups along a path, then it compresses to a zero action.
3148 Further, if there are multiple cleanups along a path, we only
3149 need to represent one of them, as that is enough to trigger
3150 entry to the landing pad at runtime. */
3151 next = collect_one_action_chain (ar_hash, region->outer);
3152 if (next <= 0)
3153 return 0;
3154 for (c = region->outer; c ; c = c->outer)
3155 if (c->type == ERT_CLEANUP)
3156 return next;
3157 return add_action_record (ar_hash, 0, next);
3158
3159 case ERT_TRY:
3160 /* Process the associated catch regions in reverse order.
3161 If there's a catch-all handler, then we don't need to
3162 search outer regions. Use a magic -3 value to record
3163 that we haven't done the outer search. */
3164 next = -3;
3165 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3166 {
3167 if (c->u.eh_catch.type_list == NULL)
3168 {
3169 /* Retrieve the filter from the head of the filter list
3170 where we have stored it (see assign_filter_values). */
3171 int filter
3172 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3173
3174 next = add_action_record (ar_hash, filter, 0);
3175 }
3176 else
3177 {
3178 /* Once the outer search is done, trigger an action record for
3179 each filter we have. */
3180 tree flt_node;
3181
3182 if (next == -3)
3183 {
3184 next = collect_one_action_chain (ar_hash, region->outer);
3185
3186 /* If there is no next action, terminate the chain. */
3187 if (next == -1)
3188 next = 0;
3189 /* If all outer actions are cleanups or must_not_throw,
3190 we'll have no action record for it, since we had wanted
3191 to encode these states in the call-site record directly.
3192 Add a cleanup action to the chain to catch these. */
3193 else if (next <= 0)
3194 next = add_action_record (ar_hash, 0, 0);
3195 }
3196
3197 flt_node = c->u.eh_catch.filter_list;
3198 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3199 {
3200 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3201 next = add_action_record (ar_hash, filter, next);
3202 }
3203 }
3204 }
3205 return next;
3206
3207 case ERT_ALLOWED_EXCEPTIONS:
3208 /* An exception specification adds its filter to the
3209 beginning of the chain. */
3210 next = collect_one_action_chain (ar_hash, region->outer);
3211
3212 /* If there is no next action, terminate the chain. */
3213 if (next == -1)
3214 next = 0;
3215 /* If all outer actions are cleanups or must_not_throw,
3216 we'll have no action record for it, since we had wanted
3217 to encode these states in the call-site record directly.
3218 Add a cleanup action to the chain to catch these. */
3219 else if (next <= 0)
3220 next = add_action_record (ar_hash, 0, 0);
3221
3222 return add_action_record (ar_hash, region->u.allowed.filter, next);
3223
3224 case ERT_MUST_NOT_THROW:
3225 /* A must-not-throw region with no inner handlers or cleanups
3226 requires no call-site entry. Note that this differs from
3227 the no handler or cleanup case in that we do require an lsda
3228 to be generated. Return a magic -2 value to record this. */
3229 return -2;
3230
3231 case ERT_CATCH:
3232 case ERT_THROW:
3233 /* CATCH regions are handled in TRY above. THROW regions are
3234 for optimization information only and produce no output. */
3235 return collect_one_action_chain (ar_hash, region->outer);
3236
3237 default:
3238 gcc_unreachable ();
3239 }
3240 }
3241
3242 static int
3243 add_call_site (rtx landing_pad, int action)
3244 {
3245 call_site_record record;
3246
3247 record = GGC_NEW (struct call_site_record);
3248 record->landing_pad = landing_pad;
3249 record->action = action;
3250
3251 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3252
3253 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3254 }
3255
3256 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3257 The new note numbers will not refer to region numbers, but
3258 instead to call site entries. */
3259
3260 unsigned int
3261 convert_to_eh_region_ranges (void)
3262 {
3263 rtx insn, iter, note;
3264 htab_t ar_hash;
3265 int last_action = -3;
3266 rtx last_action_insn = NULL_RTX;
3267 rtx last_landing_pad = NULL_RTX;
3268 rtx first_no_action_insn = NULL_RTX;
3269 int call_site = 0;
3270
3271 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3272 return 0;
3273
3274 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3275
3276 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3277
3278 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3279 if (INSN_P (iter))
3280 {
3281 struct eh_region *region;
3282 int this_action;
3283 rtx this_landing_pad;
3284
3285 insn = iter;
3286 if (NONJUMP_INSN_P (insn)
3287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3288 insn = XVECEXP (PATTERN (insn), 0, 0);
3289
3290 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3291 if (!note)
3292 {
3293 if (! (CALL_P (insn)
3294 || (flag_non_call_exceptions
3295 && may_trap_p (PATTERN (insn)))))
3296 continue;
3297 this_action = -1;
3298 region = NULL;
3299 }
3300 else
3301 {
3302 if (INTVAL (XEXP (note, 0)) <= 0)
3303 continue;
3304 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3305 this_action = collect_one_action_chain (ar_hash, region);
3306 }
3307
3308 /* Existence of catch handlers or must-not-throw regions
3309 implies that an lsda is needed (even if empty). */
3310 if (this_action != -1)
3311 crtl->uses_eh_lsda = 1;
3312
3313 /* Delay creation of region notes for no-action regions
3314 until we're sure that an lsda will be required. */
3315 else if (last_action == -3)
3316 {
3317 first_no_action_insn = iter;
3318 last_action = -1;
3319 }
3320
3321 /* Cleanups and handlers may share action chains but not
3322 landing pads. Collect the landing pad for this region. */
3323 if (this_action >= 0)
3324 {
3325 struct eh_region *o;
3326 for (o = region; ! o->landing_pad ; o = o->outer)
3327 continue;
3328 this_landing_pad = o->landing_pad;
3329 }
3330 else
3331 this_landing_pad = NULL_RTX;
3332
3333 /* Differing actions or landing pads implies a change in call-site
3334 info, which implies some EH_REGION note should be emitted. */
3335 if (last_action != this_action
3336 || last_landing_pad != this_landing_pad)
3337 {
3338 /* If we'd not seen a previous action (-3) or the previous
3339 action was must-not-throw (-2), then we do not need an
3340 end note. */
3341 if (last_action >= -1)
3342 {
3343 /* If we delayed the creation of the begin, do it now. */
3344 if (first_no_action_insn)
3345 {
3346 call_site = add_call_site (NULL_RTX, 0);
3347 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3348 first_no_action_insn);
3349 NOTE_EH_HANDLER (note) = call_site;
3350 first_no_action_insn = NULL_RTX;
3351 }
3352
3353 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3354 last_action_insn);
3355 NOTE_EH_HANDLER (note) = call_site;
3356 }
3357
3358 /* If the new action is must-not-throw, then no region notes
3359 are created. */
3360 if (this_action >= -1)
3361 {
3362 call_site = add_call_site (this_landing_pad,
3363 this_action < 0 ? 0 : this_action);
3364 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3365 NOTE_EH_HANDLER (note) = call_site;
3366 }
3367
3368 last_action = this_action;
3369 last_landing_pad = this_landing_pad;
3370 }
3371 last_action_insn = iter;
3372 }
3373
3374 if (last_action >= -1 && ! first_no_action_insn)
3375 {
3376 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3377 NOTE_EH_HANDLER (note) = call_site;
3378 }
3379
3380 htab_delete (ar_hash);
3381 return 0;
3382 }
3383
3384 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3385 {
3386 {
3387 RTL_PASS,
3388 "eh_ranges", /* name */
3389 NULL, /* gate */
3390 convert_to_eh_region_ranges, /* execute */
3391 NULL, /* sub */
3392 NULL, /* next */
3393 0, /* static_pass_number */
3394 TV_NONE, /* tv_id */
3395 0, /* properties_required */
3396 0, /* properties_provided */
3397 0, /* properties_destroyed */
3398 0, /* todo_flags_start */
3399 TODO_dump_func, /* todo_flags_finish */
3400 }
3401 };
3402
3403 \f
3404 static void
3405 push_uleb128 (varray_type *data_area, unsigned int value)
3406 {
3407 do
3408 {
3409 unsigned char byte = value & 0x7f;
3410 value >>= 7;
3411 if (value)
3412 byte |= 0x80;
3413 VARRAY_PUSH_UCHAR (*data_area, byte);
3414 }
3415 while (value);
3416 }
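/* For example, push_uleb128 appends 624485 (0x98765) as the three bytes
   e5 8e 26: seven low-order bits per byte, least significant group
   first, high bit set on all but the last byte.  */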
3417
3418 static void
3419 push_sleb128 (varray_type *data_area, int value)
3420 {
3421 unsigned char byte;
3422 int more;
3423
3424 do
3425 {
3426 byte = value & 0x7f;
3427 value >>= 7;
3428 more = ! ((value == 0 && (byte & 0x40) == 0)
3429 || (value == -1 && (byte & 0x40) != 0));
3430 if (more)
3431 byte |= 0x80;
3432 VARRAY_PUSH_UCHAR (*data_area, byte);
3433 }
3434 while (more);
3435 }
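/* For example, push_sleb128 appends -3 as the single byte 7d and
   -123456 as the three bytes c0 bb 78; emission stops once the
   remaining value is pure sign extension of the byte just pushed.  */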
3436
3437 \f
3438 #ifndef HAVE_AS_LEB128
3439 static int
3440 dw2_size_of_call_site_table (void)
3441 {
3442 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3443 int size = n * (4 + 4 + 4); /* three data4 fields per record: region start, length, landing pad */
3444 int i;
3445
3446 for (i = 0; i < n; ++i)
3447 {
3448 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3449 size += size_of_uleb128 (cs->action);
3450 }
3451
3452 return size;
3453 }
3454
3455 static int
3456 sjlj_size_of_call_site_table (void)
3457 {
3458 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3459 int size = 0;
3460 int i;
3461
3462 for (i = 0; i < n; ++i)
3463 {
3464 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3465 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3466 size += size_of_uleb128 (cs->action);
3467 }
3468
3469 return size;
3470 }
3471 #endif
3472
3473 static void
3474 dw2_output_call_site_table (void)
3475 {
3476 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3477 int i;
3478
3479 for (i = 0; i < n; ++i)
3480 {
3481 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3482 char reg_start_lab[32];
3483 char reg_end_lab[32];
3484 char landing_pad_lab[32];
3485
3486 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3487 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3488
3489 if (cs->landing_pad)
3490 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3491 CODE_LABEL_NUMBER (cs->landing_pad));
3492
3493 /* ??? Perhaps use insn length scaling if the assembler supports
3494 generic arithmetic. */
3495 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3496 data4 if the function is small enough. */
3497 #ifdef HAVE_AS_LEB128
3498 dw2_asm_output_delta_uleb128 (reg_start_lab,
3499 current_function_func_begin_label,
3500 "region %d start", i);
3501 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3502 "length");
3503 if (cs->landing_pad)
3504 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3505 current_function_func_begin_label,
3506 "landing pad");
3507 else
3508 dw2_asm_output_data_uleb128 (0, "landing pad");
3509 #else
3510 dw2_asm_output_delta (4, reg_start_lab,
3511 current_function_func_begin_label,
3512 "region %d start", i);
3513 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3514 if (cs->landing_pad)
3515 dw2_asm_output_delta (4, landing_pad_lab,
3516 current_function_func_begin_label,
3517 "landing pad");
3518 else
3519 dw2_asm_output_data (4, 0, "landing pad");
3520 #endif
3521 dw2_asm_output_data_uleb128 (cs->action, "action");
3522 }
3523
3524 call_site_base += n;
3525 }
3526
3527 static void
3528 sjlj_output_call_site_table (void)
3529 {
3530 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3531 int i;
3532
3533 for (i = 0; i < n; ++i)
3534 {
3535 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3536
3537 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3538 "region %d landing pad", i);
3539 dw2_asm_output_data_uleb128 (cs->action, "action");
3540 }
3541
3542 call_site_base += n;
3543 }
3544
3545 #ifndef TARGET_UNWIND_INFO
3546 /* Switch to the section that should be used for exception tables. */
3547
3548 static void
3549 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3550 {
3551 section *s;
3552
3553 if (exception_section)
3554 s = exception_section;
3555 else
3556 {
3557 /* Compute the section and cache it into exception_section,
3558 unless it depends on the function name. */
3559 if (targetm.have_named_sections)
3560 {
3561 int flags;
3562
3563 if (EH_TABLES_CAN_BE_READ_ONLY)
3564 {
3565 int tt_format =
3566 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3567 flags = ((! flag_pic
3568 || ((tt_format & 0x70) != DW_EH_PE_absptr
3569 && (tt_format & 0x70) != DW_EH_PE_aligned))
3570 ? 0 : SECTION_WRITE);
3571 }
3572 else
3573 flags = SECTION_WRITE;
3574
3575 #ifdef HAVE_LD_EH_GC_SECTIONS
3576 if (flag_function_sections)
3577 {
3578 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3579 sprintf (section_name, ".gcc_except_table.%s", fnname);
3580 s = get_section (section_name, flags, NULL);
3581 free (section_name);
3582 }
3583 else
3584 #endif
3585 exception_section
3586 = s = get_section (".gcc_except_table", flags, NULL);
3587 }
3588 else
3589 exception_section
3590 = s = flag_pic ? data_section : readonly_data_section;
3591 }
3592
3593 switch_to_section (s);
3594 }
3595 #endif
3596
3597
3598 /* Output a reference from an exception table to the type_info object TYPE.
3599 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3600 the value. */
3601
3602 static void
3603 output_ttype (tree type, int tt_format, int tt_format_size)
3604 {
3605 rtx value;
3606 bool is_public = true;
3607
3608 if (type == NULL_TREE)
3609 value = const0_rtx;
3610 else
3611 {
3612 struct varpool_node *node;
3613
3614 type = lookup_type_for_runtime (type);
3615 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3616
3617 /* Let cgraph know that the rtti decl is used. Not all of the
3618 paths below go through assemble_integer, which would take
3619 care of this for us. */
3620 STRIP_NOPS (type);
3621 if (TREE_CODE (type) == ADDR_EXPR)
3622 {
3623 type = TREE_OPERAND (type, 0);
3624 if (TREE_CODE (type) == VAR_DECL)
3625 {
3626 node = varpool_node (type);
3627 if (node)
3628 varpool_mark_needed_node (node);
3629 is_public = TREE_PUBLIC (type);
3630 }
3631 }
3632 else
3633 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3634 }
3635
3636 /* Allow the target to override the type table entry format. */
3637 if (targetm.asm_out.ttype (value))
3638 return;
3639
3640 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3641 assemble_integer (value, tt_format_size,
3642 tt_format_size * BITS_PER_UNIT, 1);
3643 else
3644 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3645 }
3646
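/* Emit the exception table (LSDA) for the current function.  The layout
   produced by the code below is, in order:

     1 byte    @LPStart format (always DW_EH_PE_omit here)
     1 byte    @TType format
     uleb128   @TType base offset (only when @TType data is present)
     1 byte    call-site entry format
     uleb128   call-site table length
               call-site records (dw2 or sjlj flavor)
               action record table
               @TType entries, emitted in reverse order
               exception specification table  */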
3647 void
3648 output_function_exception_table (const char * ARG_UNUSED (fnname))
3649 {
3650 int tt_format, cs_format, lp_format, i, n;
3651 #ifdef HAVE_AS_LEB128
3652 char ttype_label[32];
3653 char cs_after_size_label[32];
3654 char cs_end_label[32];
3655 #else
3656 int call_site_len;
3657 #endif
3658 int have_tt_data;
3659 int tt_format_size = 0;
3660
3661 /* Not all functions need anything. */
3662 if (! crtl->uses_eh_lsda)
3663 return;
3664
3665 if (eh_personality_libfunc)
3666 assemble_external_libcall (eh_personality_libfunc);
3667
3668 #ifdef TARGET_UNWIND_INFO
3669 /* TODO: Move this into target file. */
3670 fputs ("\t.personality\t", asm_out_file);
3671 output_addr_const (asm_out_file, eh_personality_libfunc);
3672 fputs ("\n\t.handlerdata\n", asm_out_file);
3673 /* Note that varasm still thinks we're in the function's code section.
3674 The ".endp" directive that will immediately follow will take us back. */
3675 #else
3676 switch_to_exception_section (fnname);
3677 #endif
3678
3679 /* If the target wants a label to begin the table, emit it here. */
3680 targetm.asm_out.except_table_label (asm_out_file);
3681
3682 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3683 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3684
3685 /* Indicate the format of the @TType entries. */
3686 if (! have_tt_data)
3687 tt_format = DW_EH_PE_omit;
3688 else
3689 {
3690 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3691 #ifdef HAVE_AS_LEB128
3692 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3693 current_function_funcdef_no);
3694 #endif
3695 tt_format_size = size_of_encoded_value (tt_format);
3696
3697 assemble_align (tt_format_size * BITS_PER_UNIT);
3698 }
3699
3700 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3701 current_function_funcdef_no);
3702
3703 /* The LSDA header. */
3704
3705 /* Indicate the format of the landing pad start pointer. An omitted
3706 field implies @LPStart == @Start. */
3707 /* Currently we always put @LPStart == @Start. This field would
3708 be most useful in moving the landing pads completely out of
3709 line to another section, but it could also be used to minimize
3710 the size of uleb128 landing pad offsets. */
3711 lp_format = DW_EH_PE_omit;
3712 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3713 eh_data_format_name (lp_format));
3714
3715 /* @LPStart pointer would go here. */
3716
3717 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3718 eh_data_format_name (tt_format));
3719
3720 #ifndef HAVE_AS_LEB128
3721 if (USING_SJLJ_EXCEPTIONS)
3722 call_site_len = sjlj_size_of_call_site_table ();
3723 else
3724 call_site_len = dw2_size_of_call_site_table ();
3725 #endif
3726
3727 /* A pc-relative 4-byte displacement to the @TType data. */
3728 if (have_tt_data)
3729 {
3730 #ifdef HAVE_AS_LEB128
3731 char ttype_after_disp_label[32];
3732 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3733 current_function_funcdef_no);
3734 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3735 "@TType base offset");
3736 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3737 #else
3738 /* Ugh. Alignment complicates things: the size of the uleb128 displacement affects the padding needed to align the @TType entries, which in turn affects the displacement; iterate to a fixed point. */
3739 unsigned int before_disp, after_disp, last_disp, disp;
3740
3741 before_disp = 1 + 1;
3742 after_disp = (1 + size_of_uleb128 (call_site_len)
3743 + call_site_len
3744 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3745 + (VEC_length (tree, crtl->eh.ttype_data)
3746 * tt_format_size));
3747
3748 disp = after_disp;
3749 do
3750 {
3751 unsigned int disp_size, pad;
3752
3753 last_disp = disp;
3754 disp_size = size_of_uleb128 (disp);
3755 pad = before_disp + disp_size + after_disp;
3756 if (pad % tt_format_size)
3757 pad = tt_format_size - (pad % tt_format_size);
3758 else
3759 pad = 0;
3760 disp = after_disp + pad;
3761 }
3762 while (disp != last_disp);
3763
3764 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3765 #endif
3766 }
3767
3768 /* Indicate the format of the call-site offsets. */
3769 #ifdef HAVE_AS_LEB128
3770 cs_format = DW_EH_PE_uleb128;
3771 #else
3772 cs_format = DW_EH_PE_udata4;
3773 #endif
3774 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3775 eh_data_format_name (cs_format));
3776
3777 #ifdef HAVE_AS_LEB128
3778 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3779 current_function_funcdef_no);
3780 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3781 current_function_funcdef_no);
3782 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3783 "Call-site table length");
3784 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3785 if (USING_SJLJ_EXCEPTIONS)
3786 sjlj_output_call_site_table ();
3787 else
3788 dw2_output_call_site_table ();
3789 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3790 #else
3791 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3792 if (USING_SJLJ_EXCEPTIONS)
3793 sjlj_output_call_site_table ();
3794 else
3795 dw2_output_call_site_table ();
3796 #endif
3797
3798 /* ??? Decode and interpret the data for flag_debug_asm. */
3799 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3800 for (i = 0; i < n; ++i)
3801 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3802 (i ? NULL : "Action record table"));
3803
3804 if (have_tt_data)
3805 assemble_align (tt_format_size * BITS_PER_UNIT);
3806
3807 i = VEC_length (tree, crtl->eh.ttype_data);
3808 while (i-- > 0)
3809 {
3810 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3811 output_ttype (type, tt_format, tt_format_size);
3812 }
3813
3814 #ifdef HAVE_AS_LEB128
3815 if (have_tt_data)
3816 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3817 #endif
3818
3819 /* ??? Decode and interpret the data for flag_debug_asm. */
3820 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3821 for (i = 0; i < n; ++i)
3822 {
3823 if (targetm.arm_eabi_unwinder)
3824 {
3825 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3826 output_ttype (type, tt_format, tt_format_size);
3827 }
3828 else
3829 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3830 (i ? NULL : "Exception specification table"));
3831 }
3832
3833 switch_to_section (current_function_section ());
3834 }
3835
3836 void
3837 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3838 {
3839 fun->eh->throw_stmt_table = table;
3840 }
3841
htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE *out, struct function *fun)
{
  struct eh_region *i;
  int depth = 0;
  static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
                                           "allowed_exceptions", "must_not_throw",
                                           "throw"
                                         };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->region_number, type_name[(int) i->type]);
      if (i->tree_label)
        {
          fprintf (out, " tree_label:");
          print_generic_expr (out, i->tree_label, 0);
        }
      if (i->label)
        fprintf (out, " label:%i", INSN_UID (i->label));
      if (i->landing_pad)
        {
          fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
          if (GET_CODE (i->landing_pad) == NOTE)
            fprintf (out, " (deleted)");
        }
      if (i->post_landing_pad)
        {
          fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
          if (GET_CODE (i->post_landing_pad) == NOTE)
            fprintf (out, " (deleted)");
        }
      if (i->resume)
        {
          fprintf (out, " resume:%i", INSN_UID (i->resume));
          if (GET_CODE (i->resume) == NOTE)
            fprintf (out, " (deleted)");
        }
      if (i->may_contain_throw)
        fprintf (out, " may_contain_throw");
      switch (i->type)
        {
        case ERT_CLEANUP:
          if (i->u.cleanup.prev_try)
            fprintf (out, " prev try:%i",
                     i->u.cleanup.prev_try->region_number);
          break;

        case ERT_TRY:
          {
            struct eh_region *c;
            fprintf (out, " catch regions:");
            for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
              fprintf (out, " %i", c->region_number);
          }
          break;

        case ERT_CATCH:
          if (i->u.eh_catch.prev_catch)
            fprintf (out, " prev:%i",
                     i->u.eh_catch.prev_catch->region_number);
          if (i->u.eh_catch.next_catch)
            fprintf (out, " next:%i",
                     i->u.eh_catch.next_catch->region_number);
          fprintf (out, " type:");
          print_generic_expr (out, i->u.eh_catch.type_list, 0);
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          fprintf (out, " filter:%i types:", i->u.allowed.filter);
          print_generic_expr (out, i->u.allowed.type_list, 0);
          break;

        case ERT_THROW:
          fprintf (out, " type:");
          print_generic_expr (out, i->u.eh_throw.type, 0);
          break;

        case ERT_MUST_NOT_THROW:
          break;

        case ERT_UNKNOWN:
          break;
        }
      if (i->aka)
        {
          fprintf (out, " also known as:");
          dump_bitmap (out, i->aka);
        }
      else
        fprintf (out, "\n");
      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Dump the EH tree for FN on stderr.  */
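/* (Invokable from a debugger, e.g. "call debug_eh_tree (cfun)".)  */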

void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}


/* Verify invariants on EH region REGION and, recursively, on its
   sub-regions.  PREV_TRY is the innermost enclosing try region, used to
   check cleanup prev_try pointers.  Return true if an error was found.  */

static bool
verify_eh_region (struct eh_region *region, struct eh_region *prev_try)
{
  bool found = false;
  if (!region)
    return false;
  switch (region->type)
    {
    case ERT_CLEANUP:
      if (region->u.cleanup.prev_try != prev_try)
        {
          error ("Wrong prev_try pointer in EH region %i",
                 region->region_number);
          found = true;
        }
      break;
    case ERT_TRY:
      {
        struct eh_region *c, *prev = NULL;
        if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
          {
            error ("Try region %i has wrong eh_catch pointer to %i",
                   region->region_number,
                   region->u.eh_try.eh_catch->region_number);
            found = true;
          }
        for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
          {
            if (c->outer != region->outer)
              {
                error
                  ("Catch region %i has different outer region than try region %i",
                   c->region_number, region->region_number);
                found = true;
              }
            if (c->u.eh_catch.prev_catch != prev)
              {
                error ("Catch region %i has corrupted catchlist",
                       c->region_number);
                found = true;
              }
            prev = c;
          }
        if (prev != region->u.eh_try.last_catch)
          {
            error
              ("Try region %i has wrong last_catch pointer to %i instead of %i",
               region->region_number,
               region->u.eh_try.last_catch->region_number,
               prev->region_number);
            found = true;
          }
      }
      break;
    case ERT_CATCH:
      if (!region->u.eh_catch.prev_catch
          && (!region->next_peer || region->next_peer->type != ERT_TRY))
        {
          error ("Catch region %i should be followed by try", region->region_number);
          found = true;
        }
      break;
    case ERT_ALLOWED_EXCEPTIONS:
    case ERT_MUST_NOT_THROW:
    case ERT_THROW:
      break;
    case ERT_UNKNOWN:
      gcc_unreachable ();
    }
  if (region->type == ERT_TRY)
    prev_try = region;
  else if (region->type == ERT_MUST_NOT_THROW
           || (region->type == ERT_ALLOWED_EXCEPTIONS
               && !region->u.allowed.type_list))
    prev_try = NULL;
  for (region = region->inner; region; region = region->next_peer)
    found |= verify_eh_region (region, prev_try);
  return found;
}

/* Verify invariants on EH data structures.  */

void
verify_eh_tree (struct function *fun)
{
  struct eh_region *i, *outer = NULL;
  bool err = false;
  int nvisited = 0;
  int count = 0;
  int j;
  int depth = 0;

  if (!fun->eh->region_tree)
    return;
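
  /* First check that region_array is consistent with the tree: each slot
     must hold the region bearing that number, or a region whose AKA
     bitmap claims the number as an alias.  */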
  for (j = fun->eh->last_region_number; j > 0; --j)
    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
      {
        if (i->region_number == j)
          count++;
        if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
          {
            error ("region_array is corrupted for region %i",
                   i->region_number);
            err = true;
          }
      }
  i = fun->eh->region_tree;

  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
        {
          error ("region_array is corrupted for region %i", i->region_number);
          err = true;
        }
      if (i->outer != outer)
        {
          error ("outer block of region %i is wrong", i->region_number);
          err = true;
        }
      if (i->may_contain_throw && outer && !outer->may_contain_throw)
        {
          error
            ("region %i may contain throw and is contained in region that may not",
             i->region_number);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", i->region_number);
          err = true;
        }
      nvisited++;
      /* If there are sub-regions, process them.  */
      if (i->inner)
        outer = i, i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                {
                  if (depth != -1)
                    {
                      error ("tree list ends on depth %i", depth + 1);
                      err = true;
                    }
                  if (count != nvisited)
                    {
                      error ("array does not match the region tree");
                      err = true;
                    }
                  if (!err)
                    for (i = fun->eh->region_tree; i; i = i->next_peer)
                      err |= verify_eh_region (i, NULL);

                  if (err)
                    {
                      dump_eh_tree (stderr, fun);
                      internal_error ("verify_eh_tree failed");
                    }
                  return;
                }
              outer = i->outer;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Initialize unwind_resume_libfunc.  */

void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use those.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
                      : "_Unwind_Resume");
}

\f
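/* Run the RTL EH pass only when exception handling is being generated.  */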
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}

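/* The "eh" pass: runs rest_of_handle_eh when gate_handle_eh allows.  */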
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "eh",                                 /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

#include "gt-except.h"