/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
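
/* As an illustrative sketch of the pipeline above, consider a
   hypothetical C++ fragment:

       void f () { try { g (); } catch (int) { h (); } }

   The front end wraps the call to g in a TRY_CATCH_EXPR;
   gimplification lowers that to a GIMPLE_TRY/GIMPLE_CATCH pair.
   pass_lower_eh then records an ERT_TRY region with a single catch
   handler for `int', creates an EH_LANDING_PAD for it, and enters
   the call to g into THROW_STMT_TABLE.  After inlining,
   pass_lower_eh_dispatch assigns `int' a filter value via
   assign_filter_values and expands the region's GIMPLE_EH_DISPATCH
   into a comparison against that value.  */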


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-codes.h"
#include "optabs.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "builtins.h"

static GTY(()) int call_site_base;

struct tree_hash_traits : default_hashmap_traits
{
  static hashval_t hash (tree t) { return TREE_HASH (t); }
};

static GTY (()) hash_map<tree, tree, tree_hash_traits> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

    0:  null action record, non-null landing pad; implies cleanups
   -1:  null action record, null landing pad; implies no action
   -2:  no call-site entry; implies must_not_throw
   -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
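
/* An illustrative (hypothetical) example of the encoding: a throw
   point inside a catch handler with filter value 2, itself nested
   inside a catch handler with filter value 1, yields a chain of two
   records.  The record for the outer handler has filter 1 and
   next == 0 (end of list); the record for the inner handler has
   filter 2 and a NEXT referring to the outer record, so the runtime
   considers the innermost handler first.  */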

/* Hashtable helpers.  */

struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record *value_type;
  typedef action_record *compare_type;
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
                             const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map
    = hash_map<tree, tree, tree_hash_traits>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
         least 3 pointers - stack pointer, frame pointer and return address.
         Plus for some targets we need room for an extra pointer - in the
         case of MIPS this is the global pointer.  This makes a total of four
         pointers, but to be safe we actually allocate room for 5.

         If pointers are smaller than words then we allocate enough room for
         5 words, just in case the backend needs this much room.  For more
         discussion on this issue see:
         http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
        tmp = size_int (5 - 1);
      else
        tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
        LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
        old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef ttypes_filter *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
                                           INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
                  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
        len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
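
/* To illustrate the encoding above: for a (hypothetical) C++
   specification `throw (A, B)' whose types were assigned ttype filter
   values 1 and 2, the non-EABI path pushes the uleb128 bytes 0x01 and
   0x02 followed by the terminating 0 into ehspec_data.other; if this
   is the first entry in the buffer, the specification itself receives
   the filter value -1.  */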

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt
                        = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (&ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }
}
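
/* For instance (hypothetical catch clauses), a function whose only
   handlers are `catch (A)' and `catch (B)' would typically see A
   assigned filter value 1 and B filter value 2, in order of discovery,
   with each catch's filter_list holding a single INTEGER_CST carrying
   that value.  */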

/* Emit SEQ into the basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

\f
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (&ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           here we handle no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        sjlj_lp_call_site_index[i] = call_site;

        disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
         be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
        {
          rtx buf_addr;

          start_sequence ();
          buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
                                    sjlj_fc_jbuf_ofs);
          expand_builtin_update_setjmp_buf (buf_addr);
          p = get_insns ();
          end_sequence ();
          emit_insn_before (p, insn);
        }

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                   TYPE_MODE (integer_type_node), 1,
                                   plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs),
                                   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx_insn *seq2;
        rtx_code_label *label;

        start_sequence ();

        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            dispatch_labels.quick_push (case_elt);
            label = jump_target_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        rtx_insn *before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;
        if (current_loops)
          {
            struct loop *loop = bb->next_bb->loop_father;
            /* If we created a pre-header block, add the new block to the
               outer loop, otherwise to the loop itself.  */
            if (bb->next_bb == loop->header)
              add_bb_to_loop (bb, loop_outer (loop));
            else
              add_bb_to_loop (bb, loop);
            /* ??? For multiple dispatches we will end up with edges
               from the loop tree root into this loop, making it a
               multiple-entry loop.  Discard all affected loops.  */
            if (num_dispatch > 1)
              {
                for (loop = bb->loop_father;
                     loop_outer (loop); loop = loop_outer (loop))
                  mark_loop_for_removal (loop);
              }
          }

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
                                 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at the beginning of the function, simply associate the block
         with the outermost (non-)loop.  */
      if (current_loops)
        add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}
\f
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
        remove_eh_handler_splicer (pp);
      else
        pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splice out regions that
   are not marked.  By removing regions from the leaves, we avoid costly
   searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
        {
          rtx_code_label *lab = lp->landing_pad;
          if (lab && LABEL_P (lab))
            (*callback) (lab);
        }
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

  ??? This difference probably ought to be avoided.  We could stand
  to record nothrow for arbitrary gimple statements, and so avoid
  some moderately complex lookups in stmt_could_throw_p.  Perhaps
  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
  no-nonlocal-goto property should be recorded elsewhere as a bit
  on the call_insn directly.  Perhaps we should make more use of
  attaching the trees to call_insns (reachable via symbol_ref in
  direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
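
/* For instance, make_reg_eh_region_note (insn, 0, 3) attaches
   (REG_EH_REGION 3) to INSN, recording that it may transfer to
   landing pad 3, whereas passing ECF_NOTHROW in ECF_FLAGS yields a
   note value of 0, marking the insn as unable to throw.  */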

/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
        && insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
                               eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}

/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
        if (can_throw_external (seq->element (i)))
          return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}

/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
        if (!insn_nothrow_p (seq->element (i)))
          return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}

/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const rtx_insn *insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
        return true;
    }
  return false;
}
\f
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            crtl->all_throwers_are_sibcalls = 0;
            return 0;
2002 }
2003 }
2004
2005 if (crtl->nothrow
2006 && (cgraph_node::get (current_function_decl)->get_availability ()
2007 >= AVAIL_AVAILABLE))
2008 {
2009 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2010 struct cgraph_edge *e;
2011 for (e = node->callers; e; e = e->next_caller)
2012 e->can_throw_external = false;
2013 node->set_nothrow_flag (true);
2014
2015 if (dump_file)
2016 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2017 current_function_name ());
2018 }
2019 return 0;
2020 }
2021
2022 namespace {
2023
2024 const pass_data pass_data_set_nothrow_function_flags =
2025 {
2026 RTL_PASS, /* type */
2027 "nothrow", /* name */
2028 OPTGROUP_NONE, /* optinfo_flags */
2029 TV_NONE, /* tv_id */
2030 0, /* properties_required */
2031 0, /* properties_provided */
2032 0, /* properties_destroyed */
2033 0, /* todo_flags_start */
2034 0, /* todo_flags_finish */
2035 };
2036
2037 class pass_set_nothrow_function_flags : public rtl_opt_pass
2038 {
2039 public:
2040 pass_set_nothrow_function_flags (gcc::context *ctxt)
2041 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2042 {}
2043
2044 /* opt_pass methods: */
2045 virtual unsigned int execute (function *)
2046 {
2047 return set_nothrow_function_flags ();
2048 }
2049
2050 }; // class pass_set_nothrow_function_flags
2051
2052 } // anon namespace
2053
2054 rtl_opt_pass *
2055 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2056 {
2057 return new pass_set_nothrow_function_flags (ctxt);
2058 }
2059
2060 \f
2061 /* Various hooks for unwind library. */
2062
2063 /* Expand the EH support builtin functions:
2064 __builtin_eh_pointer and __builtin_eh_filter. */
2065
2066 static eh_region
2067 expand_builtin_eh_common (tree region_nr_t)
2068 {
2069 HOST_WIDE_INT region_nr;
2070 eh_region region;
2071
2072 gcc_assert (tree_fits_shwi_p (region_nr_t));
2073 region_nr = tree_to_shwi (region_nr_t);
2074
2075 region = (*cfun->eh->region_array)[region_nr];
2076
2077 /* ??? We shouldn't have been able to delete an eh region without
2078 deleting all the code that depended on it. */
2079 gcc_assert (region != NULL);
2080
2081 return region;
2082 }
2083
2084 /* Expand to the exc_ptr value from the given eh region. */
2085
2086 rtx
2087 expand_builtin_eh_pointer (tree exp)
2088 {
2089 eh_region region
2090 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2091 if (region->exc_ptr_reg == NULL)
2092 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2093 return region->exc_ptr_reg;
2094 }
2095
2096 /* Expand to the filter value from the given eh region. */
2097
2098 rtx
2099 expand_builtin_eh_filter (tree exp)
2100 {
2101 eh_region region
2102 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2103 if (region->filter_reg == NULL)
2104 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2105 return region->filter_reg;
2106 }
2107
2108 /* Copy the exc_ptr and filter values from one EH region's registers
2109 to another. This is used to inline the resx statement. */
2110
2111 rtx
2112 expand_builtin_eh_copy_values (tree exp)
2113 {
2114 eh_region dst
2115 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2116 eh_region src
2117 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2118 machine_mode fmode = targetm.eh_return_filter_mode ();
2119
2120 if (dst->exc_ptr_reg == NULL)
2121 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2122 if (src->exc_ptr_reg == NULL)
2123 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2124
2125 if (dst->filter_reg == NULL)
2126 dst->filter_reg = gen_reg_rtx (fmode);
2127 if (src->filter_reg == NULL)
2128 src->filter_reg = gen_reg_rtx (fmode);
2129
2130 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2131 emit_move_insn (dst->filter_reg, src->filter_reg);
2132
2133 return const0_rtx;
2134 }
2135
2136 /* Do any necessary initialization to access arbitrary stack frames.
2137 On the SPARC, this means flushing the register windows. */
2138
2139 void
2140 expand_builtin_unwind_init (void)
2141 {
2142 /* Set this so all the registers get saved in our frame; we need to be
2143 able to copy the saved values for any registers from frames we unwind. */
2144 crtl->saves_all_registers = 1;
2145
2146 #ifdef SETUP_FRAME_ADDRESSES
2147 SETUP_FRAME_ADDRESSES ();
2148 #endif
2149 }
2150
2151 /* Map a non-negative number to an eh return data register number; expands
2152 to -1 if no return data register is associated with the input number.
2153 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2154
2155 rtx
2156 expand_builtin_eh_return_data_regno (tree exp)
2157 {
2158 tree which = CALL_EXPR_ARG (exp, 0);
2159 unsigned HOST_WIDE_INT iwhich;
2160
2161 if (TREE_CODE (which) != INTEGER_CST)
2162 {
2163 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2164 return constm1_rtx;
2165 }
2166
2167 iwhich = tree_to_uhwi (which);
2168 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2169 if (iwhich == INVALID_REGNUM)
2170 return constm1_rtx;
2171
2172 #ifdef DWARF_FRAME_REGNUM
2173 iwhich = DWARF_FRAME_REGNUM (iwhich);
2174 #else
2175 iwhich = DBX_REGISTER_NUMBER (iwhich);
2176 #endif
2177
2178 return GEN_INT (iwhich);
2179 }
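/* A hedged sketch (not GCC code) of how a personality routine
   typically consumes this builtin: the unwinder's general registers
   for the EH data slots are located with __builtin_eh_return_data_regno.
   install_handler_regs is a hypothetical name for illustration.  */
#include <unwind.h>

static void
install_handler_regs (struct _Unwind_Context *context,
		      struct _Unwind_Exception *ue_header,
		      int handler_switch_value)
{
  /* Data register 0 carries the exception object, data register 1 the
     filter value selected by the runtime type comparison.  */
  _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		 (_Unwind_Ptr) ue_header);
  _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		 handler_switch_value);
}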
2180
2181 /* Given a value extracted from the return address register or stack slot,
2182 return the actual address encoded in that value. */
2183
2184 rtx
2185 expand_builtin_extract_return_addr (tree addr_tree)
2186 {
2187 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2188
2189 if (GET_MODE (addr) != Pmode
2190 && GET_MODE (addr) != VOIDmode)
2191 {
2192 #ifdef POINTERS_EXTEND_UNSIGNED
2193 addr = convert_memory_address (Pmode, addr);
2194 #else
2195 addr = convert_to_mode (Pmode, addr, 0);
2196 #endif
2197 }
2198
2199 /* First mask out any unwanted bits. */
2200 rtx mask = MASK_RETURN_ADDR;
2201 if (mask)
2202 expand_and (Pmode, addr, mask, addr);
2203
2204 /* Then adjust to find the real return address. */
2205 if (RETURN_ADDR_OFFSET)
2206 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2207
2208 return addr;
2209 }
2210
2211 /* Given an actual address in ADDR_TREE, do any necessary encoding
2212 and return the value to be stored in the return address register or
2213 stack slot so the epilogue will return to that address. */
2214
2215 rtx
2216 expand_builtin_frob_return_addr (tree addr_tree)
2217 {
2218 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2219
2220 addr = convert_memory_address (Pmode, addr);
2221
2222 if (RETURN_ADDR_OFFSET)
2223 {
2224 addr = force_reg (Pmode, addr);
2225 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2226 }
2227
2228 return addr;
2229 }
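/* A standalone sketch (not GCC code): extract and frob are inverses up
   to the bits removed by MASK_RETURN_ADDR.  The two macro values below
   are assumed examples standing in for the target's MASK_RETURN_ADDR
   and RETURN_ADDR_OFFSET.  */
#include <stdint.h>

#define EXAMPLE_MASK	(~(uintptr_t) 3)	/* assumed MASK_RETURN_ADDR */
#define EXAMPLE_OFFSET	8			/* assumed RETURN_ADDR_OFFSET */

/* What expand_builtin_extract_return_addr computes.  */
static uintptr_t
example_extract (uintptr_t stored)
{
  return (stored & EXAMPLE_MASK) + EXAMPLE_OFFSET;
}

/* What expand_builtin_frob_return_addr computes: remove the bias so the
   epilogue returns to the requested address.  */
static uintptr_t
example_frob (uintptr_t addr)
{
  return addr - EXAMPLE_OFFSET;
}

/* example_frob (example_extract (x)) == (x & EXAMPLE_MASK).  */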
2230
2231 /* Set up the epilogue with the magic bits we'll need to return to the
2232 exception handler. */
2233
2234 void
2235 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2236 tree handler_tree)
2237 {
2238 rtx tmp;
2239
2240 #ifdef EH_RETURN_STACKADJ_RTX
2241 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2242 VOIDmode, EXPAND_NORMAL);
2243 tmp = convert_memory_address (Pmode, tmp);
2244 if (!crtl->eh.ehr_stackadj)
2245 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2246 else if (tmp != crtl->eh.ehr_stackadj)
2247 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2248 #endif
2249
2250 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2251 VOIDmode, EXPAND_NORMAL);
2252 tmp = convert_memory_address (Pmode, tmp);
2253 if (!crtl->eh.ehr_handler)
2254 crtl->eh.ehr_handler = copy_to_reg (tmp);
2255 else if (tmp != crtl->eh.ehr_handler)
2256 emit_move_insn (crtl->eh.ehr_handler, tmp);
2257
2258 if (!crtl->eh.ehr_label)
2259 crtl->eh.ehr_label = gen_label_rtx ();
2260 emit_jump (crtl->eh.ehr_label);
2261 }
2262
2263 /* Expand __builtin_eh_return. This exit path from the function loads up
2264 the eh return data registers, adjusts the stack, and branches to a
2265 given PC other than the normal return address. */
2266
2267 void
2268 expand_eh_return (void)
2269 {
2270 rtx_code_label *around_label;
2271
2272 if (! crtl->eh.ehr_label)
2273 return;
2274
2275 crtl->calls_eh_return = 1;
2276
2277 #ifdef EH_RETURN_STACKADJ_RTX
2278 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2279 #endif
2280
2281 around_label = gen_label_rtx ();
2282 emit_jump (around_label);
2283
2284 emit_label (crtl->eh.ehr_label);
2285 clobber_return_register ();
2286
2287 #ifdef EH_RETURN_STACKADJ_RTX
2288 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2289 #endif
2290
2291 #ifdef HAVE_eh_return
2292 if (HAVE_eh_return)
2293 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2294 else
2295 #endif
2296 {
2297 #ifdef EH_RETURN_HANDLER_RTX
2298 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2299 #else
2300 error ("__builtin_eh_return not supported on this target");
2301 #endif
2302 }
2303
2304 emit_label (around_label);
2305 }
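/* Illustration of the control flow emitted by expand_eh_return above
   (a sketch, not literal RTL):

	 jump around_label		; normal paths skip the EH epilogue
     ehr_label:
	 clobber return registers
	 EH_RETURN_STACKADJ_RTX = ehr_stackadj	; if the target defines it
	 eh_return (ehr_handler)	; or EH_RETURN_HANDLER_RTX = ehr_handler
     around_label:
	 ...normal epilogue follows...  */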
2306
2307 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2308 POINTERS_EXTEND_UNSIGNED and return it. */
2309
2310 rtx
2311 expand_builtin_extend_pointer (tree addr_tree)
2312 {
2313 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2314 int extend;
2315
2316 #ifdef POINTERS_EXTEND_UNSIGNED
2317 extend = POINTERS_EXTEND_UNSIGNED;
2318 #else
2319 /* The previous EH code did an unsigned extend by default, so we do this also
2320 for consistency. */
2321 extend = 1;
2322 #endif
2323
2324 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2325 }
2326 \f
2327 static int
2328 add_action_record (action_hash_type *ar_hash, int filter, int next)
2329 {
2330 struct action_record **slot, *new_ar, tmp;
2331
2332 tmp.filter = filter;
2333 tmp.next = next;
2334 slot = ar_hash->find_slot (&tmp, INSERT);
2335
2336 if ((new_ar = *slot) == NULL)
2337 {
2338 new_ar = XNEW (struct action_record);
2339 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2340 new_ar->filter = filter;
2341 new_ar->next = next;
2342 *slot = new_ar;
2343
2344 /* The filter value goes in untouched. The link to the next
2345 record is a "self-relative" byte offset, or zero to indicate
2346 that there is no next record. So convert the absolute 1-based
2347 indices we've been carrying around into a displacement. */
2348
2349 push_sleb128 (&crtl->eh.action_record_data, filter);
2350 if (next)
2351 next -= crtl->eh.action_record_data->length () + 1;
2352 push_sleb128 (&crtl->eh.action_record_data, next);
2353 }
2354
2355 return new_ar->offset;
2356 }
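/* A standalone sketch (not GCC code) of the action-record layout
   produced above: records are <filter, next> sleb128 pairs, where NEXT
   is stored as a self-relative displacement and 0 terminates a chain.
   The fixed byte buffer and the example_* names stand in for
   crtl->eh.action_record_data and push_sleb128.  */
#include <stddef.h>

static unsigned char example_buf[64];
static size_t example_len;

static void
example_sleb128 (int value)
{
  int more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      more = !((value == 0 && (byte & 0x40) == 0)
	       || (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      example_buf[example_len++] = byte;
    }
  while (more);
}

/* Mirror add_action_record's arithmetic, without the hash-consing:
   return the 1-based offset of the new record.  */
static int
example_add_record (int filter, int next)
{
  int offset = (int) example_len + 1;
  example_sleb128 (filter);
  if (next)
    next -= (int) example_len + 1;	/* absolute -> self-relative */
  example_sleb128 (next);
  return offset;
}

/* Chaining two records, filter 2 terminating and filter 1 linking to it:
   example_add_record (1, example_add_record (2, 0));  */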
2357
2358 static int
2359 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2360 {
2361 int next;
2362
2363 /* If we've reached the top of the region chain, then we have
2364 no actions, and require no landing pad. */
2365 if (region == NULL)
2366 return -1;
2367
2368 switch (region->type)
2369 {
2370 case ERT_CLEANUP:
2371 {
2372 eh_region r;
2373 /* A cleanup adds a zero filter to the beginning of the chain, but
2374 there are special cases to look out for. If there are *only*
2375 cleanups along a path, then it compresses to a zero action.
2376 Further, if there are multiple cleanups along a path, we only
2377 need to represent one of them, as that is enough to trigger
2378 entry to the landing pad at runtime. */
2379 next = collect_one_action_chain (ar_hash, region->outer);
2380 if (next <= 0)
2381 return 0;
2382 for (r = region->outer; r ; r = r->outer)
2383 if (r->type == ERT_CLEANUP)
2384 return next;
2385 return add_action_record (ar_hash, 0, next);
2386 }
2387
2388 case ERT_TRY:
2389 {
2390 eh_catch c;
2391
2392 /* Process the associated catch regions in reverse order.
2393 If there's a catch-all handler, then we don't need to
2394 search outer regions. Use a magic -3 value to record
2395 that we haven't done the outer search. */
2396 next = -3;
2397 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2398 {
2399 if (c->type_list == NULL)
2400 {
2401 /* Retrieve the filter from the head of the filter list
2402 where we have stored it (see assign_filter_values). */
2403 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2404 next = add_action_record (ar_hash, filter, 0);
2405 }
2406 else
2407 {
2408 /* Once the outer search is done, trigger an action record for
2409 each filter we have. */
2410 tree flt_node;
2411
2412 if (next == -3)
2413 {
2414 next = collect_one_action_chain (ar_hash, region->outer);
2415
2416 /* If there is no next action, terminate the chain. */
2417 if (next == -1)
2418 next = 0;
2419 /* If all outer actions are cleanups or must_not_throw,
2420 we'll have no action record for it, since we had wanted
2421 to encode these states in the call-site record directly.
2422 Add a cleanup action to the chain to catch these. */
2423 else if (next <= 0)
2424 next = add_action_record (ar_hash, 0, 0);
2425 }
2426
2427 flt_node = c->filter_list;
2428 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2429 {
2430 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2431 next = add_action_record (ar_hash, filter, next);
2432 }
2433 }
2434 }
2435 return next;
2436 }
2437
2438 case ERT_ALLOWED_EXCEPTIONS:
2439 /* An exception specification adds its filter to the
2440 beginning of the chain. */
2441 next = collect_one_action_chain (ar_hash, region->outer);
2442
2443 /* If there is no next action, terminate the chain. */
2444 if (next == -1)
2445 next = 0;
2446 /* If all outer actions are cleanups or must_not_throw,
2447 we'll have no action record for it, since we had wanted
2448 to encode these states in the call-site record directly.
2449 Add a cleanup action to the chain to catch these. */
2450 else if (next <= 0)
2451 next = add_action_record (ar_hash, 0, 0);
2452
2453 return add_action_record (ar_hash, region->u.allowed.filter, next);
2454
2455 case ERT_MUST_NOT_THROW:
2456 /* A must-not-throw region with no inner handlers or cleanups
2457 requires no call-site entry. Note that this differs from
2458 the no handler or cleanup case in that we do require an lsda
2459 to be generated. Return a magic -2 value to record this. */
2460 return -2;
2461 }
2462
2463 gcc_unreachable ();
2464 }
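/* Summary of collect_one_action_chain's return values, gathered from
   the cases above:

     -1  no actions along the path and no landing pad required;
     -2  must-not-throw: no call-site entry, but an lsda is required;
      0  only cleanups along the path (a zero-action call site);
     >0  1-based offset of the first action record of the chain;
     -3  used only transiently above, meaning the outer search has not
	 yet been done.

   For example, a catch with a type list nested inside a cleanup yields
   a <filter, next> record chained to a terminating cleanup record
   <0, 0>.  */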
2465
2466 static int
2467 add_call_site (rtx landing_pad, int action, int section)
2468 {
2469 call_site_record record;
2470
2471 record = ggc_alloc<call_site_record_d> ();
2472 record->landing_pad = landing_pad;
2473 record->action = action;
2474
2475 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2476
2477 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2478 }
2479
2480 static rtx_note *
2481 emit_note_eh_region_end (rtx_insn *insn)
2482 {
2483 rtx_insn *next = NEXT_INSN (insn);
2484
2485 /* Make sure we do not split a call and its corresponding
2486 CALL_ARG_LOCATION note. */
2487 if (next && NOTE_P (next)
2488 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2489 insn = next;
2490
2491 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2492 }
2493
2494 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2495 The new note numbers will not refer to region numbers, but
2496 instead to call site entries. */
2497
2498 static unsigned int
2499 convert_to_eh_region_ranges (void)
2500 {
2501 rtx insn;
2502 rtx_insn *iter;
2503 rtx_note *note;
2504 action_hash_type ar_hash (31);
2505 int last_action = -3;
2506 rtx_insn *last_action_insn = NULL;
2507 rtx last_landing_pad = NULL_RTX;
2508 rtx_insn *first_no_action_insn = NULL;
2509 int call_site = 0;
2510 int cur_sec = 0;
2511 rtx_insn *section_switch_note = NULL;
2512 rtx_insn *first_no_action_insn_before_switch = NULL;
2513 rtx_insn *last_no_action_insn_before_switch = NULL;
2514 int saved_call_site_base = call_site_base;
2515
2516 vec_alloc (crtl->eh.action_record_data, 64);
2517
2518 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2519 if (INSN_P (iter))
2520 {
2521 eh_landing_pad lp;
2522 eh_region region;
2523 bool nothrow;
2524 int this_action;
2525 rtx_code_label *this_landing_pad;
2526
2527 insn = iter;
2528 if (NONJUMP_INSN_P (insn)
2529 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2530 insn = XVECEXP (PATTERN (insn), 0, 0);
2531
2532 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2533 if (nothrow)
2534 continue;
2535 if (region)
2536 this_action = collect_one_action_chain (&ar_hash, region);
2537 else
2538 this_action = -1;
2539
2540 /* Existence of catch handlers, or must-not-throw regions
2541 implies that an lsda is needed (even if empty). */
2542 if (this_action != -1)
2543 crtl->uses_eh_lsda = 1;
2544
2545 /* Delay creation of region notes for no-action regions
2546 until we're sure that an lsda will be required. */
2547 else if (last_action == -3)
2548 {
2549 first_no_action_insn = iter;
2550 last_action = -1;
2551 }
2552
2553 if (this_action >= 0)
2554 this_landing_pad = lp->landing_pad;
2555 else
2556 this_landing_pad = NULL;
2557
2558 /* Differing actions or landing pads imply a change in call-site
2559 info, which implies some EH_REGION note should be emitted. */
2560 if (last_action != this_action
2561 || last_landing_pad != this_landing_pad)
2562 {
2563 /* If there is a queued no-action region in the other section
2564 with hot/cold partitioning, emit it now. */
2565 if (first_no_action_insn_before_switch)
2566 {
2567 gcc_assert (this_action != -1
2568 && last_action == (first_no_action_insn
2569 ? -1 : -3));
2570 call_site = add_call_site (NULL_RTX, 0, 0);
2571 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2572 first_no_action_insn_before_switch);
2573 NOTE_EH_HANDLER (note) = call_site;
2574 note
2575 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2576 NOTE_EH_HANDLER (note) = call_site;
2577 gcc_assert (last_action != -3
2578 || (last_action_insn
2579 == last_no_action_insn_before_switch));
2580 first_no_action_insn_before_switch = NULL;
2581 last_no_action_insn_before_switch = NULL;
2582 call_site_base++;
2583 }
2584 /* If we'd not seen a previous action (-3) or the previous
2585 action was must-not-throw (-2), then we do not need an
2586 end note. */
2587 if (last_action >= -1)
2588 {
2589 /* If we delayed the creation of the begin, do it now. */
2590 if (first_no_action_insn)
2591 {
2592 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2593 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2594 first_no_action_insn);
2595 NOTE_EH_HANDLER (note) = call_site;
2596 first_no_action_insn = NULL;
2597 }
2598
2599 note = emit_note_eh_region_end (last_action_insn);
2600 NOTE_EH_HANDLER (note) = call_site;
2601 }
2602
2603 /* If the new action is must-not-throw, then no region notes
2604 are created. */
2605 if (this_action >= -1)
2606 {
2607 call_site = add_call_site (this_landing_pad,
2608 this_action < 0 ? 0 : this_action,
2609 cur_sec);
2610 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2611 NOTE_EH_HANDLER (note) = call_site;
2612 }
2613
2614 last_action = this_action;
2615 last_landing_pad = this_landing_pad;
2616 }
2617 last_action_insn = iter;
2618 }
2619 else if (NOTE_P (iter)
2620 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2621 {
2622 gcc_assert (section_switch_note == NULL_RTX);
2623 gcc_assert (flag_reorder_blocks_and_partition);
2624 section_switch_note = iter;
2625 if (first_no_action_insn)
2626 {
2627 first_no_action_insn_before_switch = first_no_action_insn;
2628 last_no_action_insn_before_switch = last_action_insn;
2629 first_no_action_insn = NULL;
2630 gcc_assert (last_action == -1);
2631 last_action = -3;
2632 }
2633 /* Force closing of current EH region before section switch and
2634 opening a new one afterwards. */
2635 else if (last_action != -3)
2636 last_landing_pad = pc_rtx;
2637 if (crtl->eh.call_site_record_v[cur_sec])
2638 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2639 cur_sec++;
2640 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2641 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2642 }
2643
2644 if (last_action >= -1 && ! first_no_action_insn)
2645 {
2646 note = emit_note_eh_region_end (last_action_insn);
2647 NOTE_EH_HANDLER (note) = call_site;
2648 }
2649
2650 call_site_base = saved_call_site_base;
2651
2652 return 0;
2653 }
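/* Note on the loop above: LAST_ACTION doubles as a small state
   machine.  -3 means nothing with EH information has been seen yet,
   -2 a preceding must-not-throw (for which no notes are emitted),
   -1 a pending no-action region whose REGION_BEG note is delayed until
   we know an lsda is required, and >= 0 a real action chain.  A
   NOTE_INSN_SWITCH_TEXT_SECTIONS note forces the open range to be
   closed and reopened so that each call-site table stays within one
   section of the hot/cold partition.  */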
2654
2655 namespace {
2656
2657 const pass_data pass_data_convert_to_eh_region_ranges =
2658 {
2659 RTL_PASS, /* type */
2660 "eh_ranges", /* name */
2661 OPTGROUP_NONE, /* optinfo_flags */
2662 TV_NONE, /* tv_id */
2663 0, /* properties_required */
2664 0, /* properties_provided */
2665 0, /* properties_destroyed */
2666 0, /* todo_flags_start */
2667 0, /* todo_flags_finish */
2668 };
2669
2670 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2671 {
2672 public:
2673 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2674 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2675 {}
2676
2677 /* opt_pass methods: */
2678 virtual bool gate (function *);
2679 virtual unsigned int execute (function *)
2680 {
2681 return convert_to_eh_region_ranges ();
2682 }
2683
2684 }; // class pass_convert_to_eh_region_ranges
2685
2686 bool
2687 pass_convert_to_eh_region_ranges::gate (function *)
2688 {
2689 /* Nothing to do for SJLJ exceptions or if no regions created. */
2690 if (cfun->eh->region_tree == NULL)
2691 return false;
2692 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2693 return false;
2694 return true;
2695 }
2696
2697 } // anon namespace
2698
2699 rtl_opt_pass *
2700 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2701 {
2702 return new pass_convert_to_eh_region_ranges (ctxt);
2703 }
2704 \f
2705 static void
2706 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2707 {
2708 do
2709 {
2710 unsigned char byte = value & 0x7f;
2711 value >>= 7;
2712 if (value)
2713 byte |= 0x80;
2714 vec_safe_push (*data_area, byte);
2715 }
2716 while (value);
2717 }
2718
2719 static void
2720 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2721 {
2722 unsigned char byte;
2723 int more;
2724
2725 do
2726 {
2727 byte = value & 0x7f;
2728 value >>= 7;
2729 more = ! ((value == 0 && (byte & 0x40) == 0)
2730 || (value == -1 && (byte & 0x40) != 0));
2731 if (more)
2732 byte |= 0x80;
2733 vec_safe_push (*data_area, byte);
2734 }
2735 while (more);
2736 }
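/* Worked example for the two encoders above, using values from the
   DWARF specification:

     push_uleb128 (..., 624485)	 emits	0xe5 0x8e 0x26
     push_sleb128 (..., -2)	 emits	0x7e
     push_sleb128 (..., -127)	 emits	0x81 0x7f

   The high bit of each byte is a continuation flag; sleb128 stops once
   the remaining value is all sign bits and bit 6 of the last byte
   matches the sign.  */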
2737
2738 \f
2739 #ifndef HAVE_AS_LEB128
2740 static int
2741 dw2_size_of_call_site_table (int section)
2742 {
2743 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2744 int size = n * (4 + 4 + 4);
2745 int i;
2746
2747 for (i = 0; i < n; ++i)
2748 {
2749 struct call_site_record_d *cs =
2750 (*crtl->eh.call_site_record_v[section])[i];
2751 size += size_of_uleb128 (cs->action);
2752 }
2753
2754 return size;
2755 }
2756
2757 static int
2758 sjlj_size_of_call_site_table (void)
2759 {
2760 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2761 int size = 0;
2762 int i;
2763
2764 for (i = 0; i < n; ++i)
2765 {
2766 struct call_site_record_d *cs =
2767 (*crtl->eh.call_site_record_v[0])[i];
2768 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2769 size += size_of_uleb128 (cs->action);
2770 }
2771
2772 return size;
2773 }
2774 #endif
2775
2776 static void
2777 dw2_output_call_site_table (int cs_format, int section)
2778 {
2779 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2780 int i;
2781 const char *begin;
2782
2783 if (section == 0)
2784 begin = current_function_func_begin_label;
2785 else if (first_function_block_is_cold)
2786 begin = crtl->subsections.hot_section_label;
2787 else
2788 begin = crtl->subsections.cold_section_label;
2789
2790 for (i = 0; i < n; ++i)
2791 {
2792 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2793 char reg_start_lab[32];
2794 char reg_end_lab[32];
2795 char landing_pad_lab[32];
2796
2797 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2798 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2799
2800 if (cs->landing_pad)
2801 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2802 CODE_LABEL_NUMBER (cs->landing_pad));
2803
2804 /* ??? Perhaps use insn length scaling if the assembler supports
2805 generic arithmetic. */
2806 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2807 data4 if the function is small enough. */
2808 if (cs_format == DW_EH_PE_uleb128)
2809 {
2810 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2811 "region %d start", i);
2812 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2813 "length");
2814 if (cs->landing_pad)
2815 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2816 "landing pad");
2817 else
2818 dw2_asm_output_data_uleb128 (0, "landing pad");
2819 }
2820 else
2821 {
2822 dw2_asm_output_delta (4, reg_start_lab, begin,
2823 "region %d start", i);
2824 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2825 if (cs->landing_pad)
2826 dw2_asm_output_delta (4, landing_pad_lab, begin,
2827 "landing pad");
2828 else
2829 dw2_asm_output_data (4, 0, "landing pad");
2830 }
2831 dw2_asm_output_data_uleb128 (cs->action, "action");
2832 }
2833
2834 call_site_base += n;
2835 }
2836
2837 static void
2838 sjlj_output_call_site_table (void)
2839 {
2840 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2841 int i;
2842
2843 for (i = 0; i < n; ++i)
2844 {
2845 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2846
2847 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2848 "region %d landing pad", i);
2849 dw2_asm_output_data_uleb128 (cs->action, "action");
2850 }
2851
2852 call_site_base += n;
2853 }
2854
2855 /* Switch to the section that should be used for exception tables. */
2856
2857 static void
2858 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2859 {
2860 section *s;
2861
2862 if (exception_section)
2863 s = exception_section;
2864 else
2865 {
2866 /* Compute the section and cache it into exception_section,
2867 unless it depends on the function name. */
2868 if (targetm_common.have_named_sections)
2869 {
2870 int flags;
2871
2872 if (EH_TABLES_CAN_BE_READ_ONLY)
2873 {
2874 int tt_format =
2875 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2876 flags = ((! flag_pic
2877 || ((tt_format & 0x70) != DW_EH_PE_absptr
2878 && (tt_format & 0x70) != DW_EH_PE_aligned))
2879 ? 0 : SECTION_WRITE);
2880 }
2881 else
2882 flags = SECTION_WRITE;
2883
2884 #ifdef HAVE_LD_EH_GC_SECTIONS
2885 if (flag_function_sections
2886 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2887 {
2888 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2889 /* The EH table must match the code section, so only mark
2890 it linkonce if we have COMDAT groups to tie them together. */
2891 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2892 flags |= SECTION_LINKONCE;
2893 sprintf (section_name, ".gcc_except_table.%s", fnname);
2894 s = get_section (section_name, flags, current_function_decl);
2895 free (section_name);
2896 }
2897 else
2898 #endif
2899 exception_section
2900 = s = get_section (".gcc_except_table", flags, NULL);
2901 }
2902 else
2903 exception_section
2904 = s = flag_pic ? data_section : readonly_data_section;
2905 }
2906
2907 switch_to_section (s);
2908 }
2909
2910
2911 /* Output a reference from an exception table to the type_info object TYPE.
2912 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2913 the value. */
2914
2915 static void
2916 output_ttype (tree type, int tt_format, int tt_format_size)
2917 {
2918 rtx value;
2919 bool is_public = true;
2920
2921 if (type == NULL_TREE)
2922 value = const0_rtx;
2923 else
2924 {
2925 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2926 runtime types so TYPE should already be a runtime type
2927 reference. When pass_ipa_free_lang_data is made a default
2928 pass, we can then remove the call to lookup_type_for_runtime
2929 below. */
2930 if (TYPE_P (type))
2931 type = lookup_type_for_runtime (type);
2932
2933 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2934
2935 /* Let cgraph know that the rtti decl is used. Not all of the
2936 paths below go through assemble_integer, which would take
2937 care of this for us. */
2938 STRIP_NOPS (type);
2939 if (TREE_CODE (type) == ADDR_EXPR)
2940 {
2941 type = TREE_OPERAND (type, 0);
2942 if (TREE_CODE (type) == VAR_DECL)
2943 is_public = TREE_PUBLIC (type);
2944 }
2945 else
2946 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2947 }
2948
2949 /* Allow the target to override the type table entry format. */
2950 if (targetm.asm_out.ttype (value))
2951 return;
2952
2953 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2954 assemble_integer (value, tt_format_size,
2955 tt_format_size * BITS_PER_UNIT, 1);
2956 else
2957 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2958 }
2959
2960 static void
2961 output_one_function_exception_table (int section)
2962 {
2963 int tt_format, cs_format, lp_format, i;
2964 #ifdef HAVE_AS_LEB128
2965 char ttype_label[32];
2966 char cs_after_size_label[32];
2967 char cs_end_label[32];
2968 #else
2969 int call_site_len;
2970 #endif
2971 int have_tt_data;
2972 int tt_format_size = 0;
2973
2974 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2975 || (targetm.arm_eabi_unwinder
2976 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2977 : vec_safe_length (cfun->eh->ehspec_data.other)));
2978
2979 /* Indicate the format of the @TType entries. */
2980 if (! have_tt_data)
2981 tt_format = DW_EH_PE_omit;
2982 else
2983 {
2984 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2985 #ifdef HAVE_AS_LEB128
2986 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2987 section ? "LLSDATTC" : "LLSDATT",
2988 current_function_funcdef_no);
2989 #endif
2990 tt_format_size = size_of_encoded_value (tt_format);
2991
2992 assemble_align (tt_format_size * BITS_PER_UNIT);
2993 }
2994
2995 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2996 current_function_funcdef_no);
2997
2998 /* The LSDA header. */
2999
3000 /* Indicate the format of the landing pad start pointer. An omitted
3001 field implies @LPStart == @Start. */
3002 /* Currently we always put @LPStart == @Start. This field would
3003 be most useful in moving the landing pads completely out of
3004 line to another section, but it could also be used to minimize
3005 the size of uleb128 landing pad offsets. */
3006 lp_format = DW_EH_PE_omit;
3007 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3008 eh_data_format_name (lp_format));
3009
3010 /* @LPStart pointer would go here. */
3011
3012 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3013 eh_data_format_name (tt_format));
3014
3015 #ifndef HAVE_AS_LEB128
3016 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3017 call_site_len = sjlj_size_of_call_site_table ();
3018 else
3019 call_site_len = dw2_size_of_call_site_table (section);
3020 #endif
3021
3022 /* A pc-relative 4-byte displacement to the @TType data. */
3023 if (have_tt_data)
3024 {
3025 #ifdef HAVE_AS_LEB128
3026 char ttype_after_disp_label[32];
3027 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3028 section ? "LLSDATTDC" : "LLSDATTD",
3029 current_function_funcdef_no);
3030 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3031 "@TType base offset");
3032 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3033 #else
3034 /* Ug. Alignment complicates things; iterate DISP to a fixed point. */
3035 unsigned int before_disp, after_disp, last_disp, disp;
3036
3037 before_disp = 1 + 1;
3038 after_disp = (1 + size_of_uleb128 (call_site_len)
3039 + call_site_len
3040 + vec_safe_length (crtl->eh.action_record_data)
3041 + (vec_safe_length (cfun->eh->ttype_data)
3042 * tt_format_size));
3043
3044 disp = after_disp;
3045 do
3046 {
3047 unsigned int disp_size, pad;
3048
3049 last_disp = disp;
3050 disp_size = size_of_uleb128 (disp);
3051 pad = before_disp + disp_size + after_disp;
3052 if (pad % tt_format_size)
3053 pad = tt_format_size - (pad % tt_format_size);
3054 else
3055 pad = 0;
3056 disp = after_disp + pad;
3057 }
3058 while (disp != last_disp);
3059
3060 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3061 #endif
3062 }
3063
3064 /* Indicate the format of the call-site offsets. */
3065 #ifdef HAVE_AS_LEB128
3066 cs_format = DW_EH_PE_uleb128;
3067 #else
3068 cs_format = DW_EH_PE_udata4;
3069 #endif
3070 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3071 eh_data_format_name (cs_format));
3072
3073 #ifdef HAVE_AS_LEB128
3074 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3075 section ? "LLSDACSBC" : "LLSDACSB",
3076 current_function_funcdef_no);
3077 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3078 section ? "LLSDACSEC" : "LLSDACSE",
3079 current_function_funcdef_no);
3080 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3081 "Call-site table length");
3082 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3083 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3084 sjlj_output_call_site_table ();
3085 else
3086 dw2_output_call_site_table (cs_format, section);
3087 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3088 #else
3089 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3090 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3091 sjlj_output_call_site_table ();
3092 else
3093 dw2_output_call_site_table (cs_format, section);
3094 #endif
3095
3096 /* ??? Decode and interpret the data for flag_debug_asm. */
3097 {
3098 uchar uc;
3099 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3100 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3101 }
3102
3103 if (have_tt_data)
3104 assemble_align (tt_format_size * BITS_PER_UNIT);
3105
3106 i = vec_safe_length (cfun->eh->ttype_data);
3107 while (i-- > 0)
3108 {
3109 tree type = (*cfun->eh->ttype_data)[i];
3110 output_ttype (type, tt_format, tt_format_size);
3111 }
3112
3113 #ifdef HAVE_AS_LEB128
3114 if (have_tt_data)
3115 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3116 #endif
3117
3118 /* ??? Decode and interpret the data for flag_debug_asm. */
3119 if (targetm.arm_eabi_unwinder)
3120 {
3121 tree type;
3122 for (i = 0;
3123 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3124 output_ttype (type, tt_format, tt_format_size);
3125 }
3126 else
3127 {
3128 uchar uc;
3129 for (i = 0;
3130 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3131 dw2_asm_output_data (1, uc,
3132 i ? NULL : "Exception specification table");
3133 }
3134 }
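/* Summary of the LSDA laid out by the function above (a sketch of the
   byte stream, in order):

     1 byte	@LPStart format (DW_EH_PE_omit here: @LPStart == @Start)
     1 byte	@TType format (DW_EH_PE_omit when there is no type data)
     uleb128	@TType base offset (only when type data is present)
     1 byte	call-site table format
     uleb128	call-site table length
     ...	call-site records
     ...	action record table (sleb128 <filter, next> pairs)
     ...	type table, indexed backwards from the @TType base
     ...	exception specification table

   The fixed-point loop above is needed without assembler leb128
   support: the byte size of the uleb128 @TType base offset changes the
   padding required to align the type table, which in turn changes the
   offset being encoded.  */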
3135
3136 void
3137 output_function_exception_table (const char *fnname)
3138 {
3139 rtx personality = get_personality_function (current_function_decl);
3140
3141 /* Not all functions need an exception table. */
3142 if (! crtl->uses_eh_lsda)
3143 return;
3144
3145 if (personality)
3146 {
3147 assemble_external_libcall (personality);
3148
3149 if (targetm.asm_out.emit_except_personality)
3150 targetm.asm_out.emit_except_personality (personality);
3151 }
3152
3153 switch_to_exception_section (fnname);
3154
3155 /* If the target wants a label to begin the table, emit it here. */
3156 targetm.asm_out.emit_except_table_label (asm_out_file);
3157
3158 output_one_function_exception_table (0);
3159 if (crtl->eh.call_site_record_v[1])
3160 output_one_function_exception_table (1);
3161
3162 switch_to_section (current_function_section ());
3163 }
3164
3165 void
3166 set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
3167 {
3168 fun->eh->throw_stmt_table = table;
3169 }
3170
3171 hash_map<gimple, int> *
3172 get_eh_throw_stmt_table (struct function *fun)
3173 {
3174 return fun->eh->throw_stmt_table;
3175 }
3176 \f
3177 /* Determine if the function needs an EH personality function. */
3178
3179 enum eh_personality_kind
3180 function_needs_eh_personality (struct function *fn)
3181 {
3182 enum eh_personality_kind kind = eh_personality_none;
3183 eh_region i;
3184
3185 FOR_ALL_EH_REGION_FN (i, fn)
3186 {
3187 switch (i->type)
3188 {
3189 case ERT_CLEANUP:
3190 /* Can do with any personality including the generic C one. */
3191 kind = eh_personality_any;
3192 break;
3193
3194 case ERT_TRY:
3195 case ERT_ALLOWED_EXCEPTIONS:
3196 /* Always needs an EH personality function. The generic C
3197 personality doesn't handle these even for empty type lists. */
3198 return eh_personality_lang;
3199
3200 case ERT_MUST_NOT_THROW:
3201 /* Always needs an EH personality function. The language may specify
3202 which abort routine must be used, e.g. std::terminate. */
3203 return eh_personality_lang;
3204 }
3205 }
3206
3207 return kind;
3208 }
3209 \f
3210 /* Dump EH information to OUT. */
3211
3212 void
3213 dump_eh_tree (FILE * out, struct function *fun)
3214 {
3215 eh_region i;
3216 int depth = 0;
3217 static const char *const type_name[] = {
3218 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3219 };
3220
3221 i = fun->eh->region_tree;
3222 if (!i)
3223 return;
3224
3225 fprintf (out, "Eh tree:\n");
3226 while (1)
3227 {
3228 fprintf (out, " %*s %i %s", depth * 2, "",
3229 i->index, type_name[(int) i->type]);
3230
3231 if (i->landing_pads)
3232 {
3233 eh_landing_pad lp;
3234
3235 fprintf (out, " land:");
3236 if (current_ir_type () == IR_GIMPLE)
3237 {
3238 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3239 {
3240 fprintf (out, "{%i,", lp->index);
3241 print_generic_expr (out, lp->post_landing_pad, 0);
3242 fputc ('}', out);
3243 if (lp->next_lp)
3244 fputc (',', out);
3245 }
3246 }
3247 else
3248 {
3249 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3250 {
3251 fprintf (out, "{%i,", lp->index);
3252 if (lp->landing_pad)
3253 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3254 NOTE_P (lp->landing_pad) ? "(del)" : "");
3255 else
3256 fprintf (out, "(nil),");
3257 if (lp->post_landing_pad)
3258 {
3259 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3260 fprintf (out, "%i%s}", INSN_UID (lab),
3261 NOTE_P (lab) ? "(del)" : "");
3262 }
3263 else
3264 fprintf (out, "(nil)}");
3265 if (lp->next_lp)
3266 fputc (',', out);
3267 }
3268 }
3269 }
3270
3271 switch (i->type)
3272 {
3273 case ERT_CLEANUP:
3274 case ERT_MUST_NOT_THROW:
3275 break;
3276
3277 case ERT_TRY:
3278 {
3279 eh_catch c;
3280 fprintf (out, " catch:");
3281 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3282 {
3283 fputc ('{', out);
3284 if (c->label)
3285 {
3286 fprintf (out, "lab:");
3287 print_generic_expr (out, c->label, 0);
3288 fputc (';', out);
3289 }
3290 print_generic_expr (out, c->type_list, 0);
3291 fputc ('}', out);
3292 if (c->next_catch)
3293 fputc (',', out);
3294 }
3295 }
3296 break;
3297
3298 case ERT_ALLOWED_EXCEPTIONS:
3299 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3300 print_generic_expr (out, i->u.allowed.type_list, 0);
3301 break;
3302 }
3303 fputc ('\n', out);
3304
3305 /* If there are sub-regions, process them. */
3306 if (i->inner)
3307 i = i->inner, depth++;
3308 /* If there are peers, process them. */
3309 else if (i->next_peer)
3310 i = i->next_peer;
3311 /* Otherwise, step back up the tree to the next peer. */
3312 else
3313 {
3314 do
3315 {
3316 i = i->outer;
3317 depth--;
3318 if (i == NULL)
3319 return;
3320 }
3321 while (i->next_peer == NULL);
3322 i = i->next_peer;
3323 }
3324 }
3325 }
3326
3327 /* Dump the EH tree for FN on stderr. */
3328
3329 DEBUG_FUNCTION void
3330 debug_eh_tree (struct function *fn)
3331 {
3332 dump_eh_tree (stderr, fn);
3333 }
3334
3335 /* Verify invariants on EH data structures. */
3336
3337 DEBUG_FUNCTION void
3338 verify_eh_tree (struct function *fun)
3339 {
3340 eh_region r, outer;
3341 int nvisited_lp, nvisited_r;
3342 int count_lp, count_r, depth, i;
3343 eh_landing_pad lp;
3344 bool err = false;
3345
3346 if (!fun->eh->region_tree)
3347 return;
3348
3349 count_r = 0;
3350 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3351 if (r)
3352 {
3353 if (r->index == i)
3354 count_r++;
3355 else
3356 {
3357 error ("region_array is corrupted for region %i", r->index);
3358 err = true;
3359 }
3360 }
3361
3362 count_lp = 0;
3363 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3364 if (lp)
3365 {
3366 if (lp->index == i)
3367 count_lp++;
3368 else
3369 {
3370 error ("lp_array is corrupted for lp %i", lp->index);
3371 err = true;
3372 }
3373 }
3374
3375 depth = nvisited_lp = nvisited_r = 0;
3376 outer = NULL;
3377 r = fun->eh->region_tree;
3378 while (1)
3379 {
3380 if ((*fun->eh->region_array)[r->index] != r)
3381 {
3382 error ("region_array is corrupted for region %i", r->index);
3383 err = true;
3384 }
3385 if (r->outer != outer)
3386 {
3387 error ("outer block of region %i is wrong", r->index);
3388 err = true;
3389 }
3390 if (depth < 0)
3391 {
3392 error ("negative nesting depth of region %i", r->index);
3393 err = true;
3394 }
3395 nvisited_r++;
3396
3397 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3398 {
3399 if ((*fun->eh->lp_array)[lp->index] != lp)
3400 {
3401 error ("lp_array is corrupted for lp %i", lp->index);
3402 err = true;
3403 }
3404 if (lp->region != r)
3405 {
3406 error ("region of lp %i is wrong", lp->index);
3407 err = true;
3408 }
3409 nvisited_lp++;
3410 }
3411
3412 if (r->inner)
3413 outer = r, r = r->inner, depth++;
3414 else if (r->next_peer)
3415 r = r->next_peer;
3416 else
3417 {
3418 do
3419 {
3420 r = r->outer;
3421 if (r == NULL)
3422 goto region_done;
3423 depth--;
3424 outer = r->outer;
3425 }
3426 while (r->next_peer == NULL);
3427 r = r->next_peer;
3428 }
3429 }
3430 region_done:
3431 if (depth != 0)
3432 {
3433 error ("tree list ends on depth %i", depth);
3434 err = true;
3435 }
3436 if (count_r != nvisited_r)
3437 {
3438 error ("region_array does not match region_tree");
3439 err = true;
3440 }
3441 if (count_lp != nvisited_lp)
3442 {
3443 error ("lp_array does not match region_tree");
3444 err = true;
3445 }
3446
3447 if (err)
3448 {
3449 dump_eh_tree (stderr, fun);
3450 internal_error ("verify_eh_tree failed");
3451 }
3452 }
3453 \f
3454 #include "gt-except.h"