/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-codes.h"
#include "optabs.h"
#include "hashtab.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hash-table.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "builtins.h"

static GTY(()) int call_site_base;

struct tree_hash_traits : default_hashmap_traits
{
  static hashval_t hash (tree t) { return TREE_HASH (t); }
};

static GTY (()) hash_map<tree, tree, tree_hash_traits> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

     0:  null action record, non-null landing pad; implies cleanups
    -1:  null action record, null landing pad; implies no action
    -2:  no call-site entry; implies must_not_throw
    -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record *value_type;
  typedef action_record *compare_type;
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
                             const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
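/* Initialize the per-compilation EH state.  When SjLj exceptions are
   in use, build the type node describing the runtime's
   SjLj_Function_Context structure and cache its field offsets.  */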
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map
    = hash_map<tree, tree, tree_hash_traits>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                                (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
         least 3 pointers - stack pointer, frame pointer and return address.
         Plus for some targets we need room for an extra pointer - in the
         case of MIPS this is the global pointer.  This makes a total of four
         pointers, but to be safe we actually allocate room for 5.

         If pointers are smaller than words then we allocate enough room for
         5 words, just in case the backend needs this much room.  For more
         discussion on this issue see:
         http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
        tmp = size_int (5 - 1);
      else
        tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

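/* Allocate and initialize the per-function EH state.  */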
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

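/* Create a CATCH handler catching TYPE_OR_LIST (a type, a TREE_LIST of
   types, or NULL for a catch-all) and append it to TRY region T.  */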
eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

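/* Create a new landing pad for REGION, link it into the region's list
   of landing pads, and record it in CFUN->EH->LP_ARRAY.  */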
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

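/* Accessors that map region and landing pad indices back to the
   corresponding objects, either in IFUN or in the current function.  */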
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

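/* Map a landing pad number I, as stored in a REG_EH_REGION note, back
   to its region: I > 0 indexes LP_ARRAY and yields that pad's region,
   I < 0 indexes REGION_ARRAY directly (a MUST_NOT_THROW region), and
   I == 0 means no region.  */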
eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
        LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
        old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
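/* Record TYPE in TYPE_TO_RUNTIME_MAP, computing its runtime
   representation via the language's eh_runtime_type hook the first
   time the type is seen.  */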
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef ttypes_filter *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
                                           INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
                  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
        len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt
                        = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (&ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

\f
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (&ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        sjlj_lp_call_site_index[i] = call_site;

        disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
         be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
        {
          rtx buf_addr;

          start_sequence ();
          buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
                                    sjlj_fc_jbuf_ofs);
          expand_builtin_update_setjmp_buf (buf_addr);
          p = get_insns ();
          end_sequence ();
          emit_insn_before (p, insn);
        }

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                   TYPE_MODE (integer_type_node), 1,
                                   plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs),
                                   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

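/* Emit the call to unwind_sjlj_unregister_libfunc at the point
   recorded by sjlj_emit_function_exit_after.  */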
static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

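/* Emit the dispatch code at DISPATCH_LABEL, where the runtime
   transfers control, that loads the exception pointer and filter
   values from the function context and routes control to one of the
   NUM_DISPATCH reachable post-landing-pad labels.  */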
static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx_insn *seq2;
        rtx_code_label *label;

        start_sequence ();

        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            dispatch_labels.quick_push (case_elt);
            label = jump_target_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        rtx_insn *before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;
        if (current_loops)
          {
            struct loop *loop = bb->next_bb->loop_father;
            /* If we created a pre-header block, add the new block to the
               outer loop, otherwise to the loop itself.  */
            if (bb->next_bb == loop->header)
              add_bb_to_loop (bb, loop_outer (loop));
            else
              add_bb_to_loop (bb, loop);
            /* ??? For multiple dispatches we will end up with edges
               from the loop tree root into this loop, making it a
               multiple-entry loop.  Discard all affected loops.  */
            if (num_dispatch > 1)
              {
                for (loop = bb->loop_father;
                     loop_outer (loop); loop = loop_outer (loop))
                  mark_loop_for_removal (loop);
              }
          }

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
                                 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
         outermost (non-)loop.  */
      if (current_loops)
        add_bb_to_loop (bb, current_loops->tree_root);
    }
}

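/* The SjLj counterpart of dw2_build_landing_pads: lay out the function
   context on the stack, mark the call sites, and emit the register,
   dispatch, and unregister code.  */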
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}
\f
/* This section handles removing dead code for flow.  */

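/* Unlink landing pad LP from its region's list of landing pads and
   clear its slot in CFUN->EH->LP_ARRAY.  */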
void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
        remove_eh_handler_splicer (pp);
      else
        pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
        {
          rtx_code_label *lab = lp->landing_pad;
          if (lab && LABEL_P (lab))
            (*callback) (lab);
        }
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}

/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
        && insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


1807 /* Extract all EH information from INSN. Return true if the insn
1808 was marked NOTHROW. */
1809
1810 static bool
1811 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1812 eh_landing_pad *plp)
1813 {
1814 eh_landing_pad lp = NULL;
1815 eh_region r = NULL;
1816 bool ret = false;
1817 rtx note;
1818 int lp_nr;
1819
1820 if (! INSN_P (insn))
1821 goto egress;
1822
1823 if (NONJUMP_INSN_P (insn)
1824 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1825 insn = XVECEXP (PATTERN (insn), 0, 0);
1826
1827 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1828 if (!note)
1829 {
1830 ret = !insn_could_throw_p (insn);
1831 goto egress;
1832 }
1833
1834 lp_nr = INTVAL (XEXP (note, 0));
1835 if (lp_nr == 0 || lp_nr == INT_MIN)
1836 {
1837 ret = true;
1838 goto egress;
1839 }
1840
1841 if (lp_nr < 0)
1842 r = (*cfun->eh->region_array)[-lp_nr];
1843 else
1844 {
1845 lp = (*cfun->eh->lp_array)[lp_nr];
1846 r = lp->region;
1847 }
1848
1849 egress:
1850 *plp = lp;
1851 *pr = r;
1852 return ret;
1853 }
1854
1855 /* Return the landing pad to which INSN may go, or NULL if it does not
1856 have a reachable landing pad within this function. */
1857
1858 eh_landing_pad
1859 get_eh_landing_pad_from_rtx (const_rtx insn)
1860 {
1861 eh_landing_pad lp;
1862 eh_region r;
1863
1864 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1865 return lp;
1866 }
1867
1868 /* Return the region to which INSN may go, or NULL if it does not
1869 have a reachable region within this function. */
1870
1871 eh_region
1872 get_eh_region_from_rtx (const_rtx insn)
1873 {
1874 eh_landing_pad lp;
1875 eh_region r;
1876
1877 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1878 return r;
1879 }
1880
1881 /* Return true if INSN throws and is caught by something in this function. */
1882
1883 bool
1884 can_throw_internal (const_rtx insn)
1885 {
1886 return get_eh_landing_pad_from_rtx (insn) != NULL;
1887 }
1888
1889 /* Return true if INSN throws and escapes from the current function. */
1890
1891 bool
1892 can_throw_external (const_rtx insn)
1893 {
1894 eh_landing_pad lp;
1895 eh_region r;
1896 bool nothrow;
1897
1898 if (! INSN_P (insn))
1899 return false;
1900
1901 if (NONJUMP_INSN_P (insn)
1902 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1903 {
1904 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1905 int i, n = seq->len ();
1906
1907 for (i = 0; i < n; i++)
1908 if (can_throw_external (seq->element (i)))
1909 return true;
1910
1911 return false;
1912 }
1913
1914 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1915
1916 /* If we can't throw, we obviously can't throw external. */
1917 if (nothrow)
1918 return false;
1919
1920 /* If we have an internal landing pad, then we're not external. */
1921 if (lp != NULL)
1922 return false;
1923
1924 /* If we're not within an EH region, then we are external. */
1925 if (r == NULL)
1926 return true;
1927
1928 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1929 which don't always have landing pads. */
1930 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1931 return false;
1932 }
1933
1934 /* Return true if INSN cannot throw at all. */
1935
1936 bool
1937 insn_nothrow_p (const_rtx insn)
1938 {
1939 eh_landing_pad lp;
1940 eh_region r;
1941
1942 if (! INSN_P (insn))
1943 return true;
1944
1945 if (NONJUMP_INSN_P (insn)
1946 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1947 {
1948 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1949 int i, n = seq->len ();
1950
1951 for (i = 0; i < n; i++)
1952 if (!insn_nothrow_p (seq->element (i)))
1953 return false;
1954
1955 return true;
1956 }
1957
1958 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1959 }
1960
1961 /* Return true if INSN can perform a non-local goto. */
1962 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1963
1964 bool
1965 can_nonlocal_goto (const rtx_insn *insn)
1966 {
1967 if (nonlocal_goto_handler_labels && CALL_P (insn))
1968 {
1969 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1970 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1971 return true;
1972 }
1973 return false;
1974 }
1975 \f
1976 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1977
1978 static unsigned int
1979 set_nothrow_function_flags (void)
1980 {
1981 rtx_insn *insn;
1982
1983 crtl->nothrow = 1;
1984
1985 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1986 something that can throw an exception. We specifically exempt
1987 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1988 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1989 is optimistic. */
1990
1991 crtl->all_throwers_are_sibcalls = 1;
1992
1993 /* If this function is already marked nothrow, there is nothing to
1994 do. (Setting TREE_NOTHROW below also requires knowing that this
1995 implementation will actually be used; see the availability check.) */
1996 if (TREE_NOTHROW (current_function_decl))
1997 return 0;
1998
1999 if (! flag_exceptions)
2000 return 0;
2001
2002 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2003 if (can_throw_external (insn))
2004 {
2005 crtl->nothrow = 0;
2006
2007 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2008 {
2009 crtl->all_throwers_are_sibcalls = 0;
2010 return 0;
2011 }
2012 }
2013
2014 if (crtl->nothrow
2015 && (cgraph_node::get (current_function_decl)->get_availability ()
2016 >= AVAIL_AVAILABLE))
2017 {
2018 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2019 struct cgraph_edge *e;
2020 for (e = node->callers; e; e = e->next_caller)
2021 e->can_throw_external = false;
2022 node->set_nothrow_flag (true);
2023
2024 if (dump_file)
2025 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2026 current_function_name ());
2027 }
2028 return 0;
2029 }
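/* For example (illustration only): with -fexceptions, a function whose
   body contains no calls and no trapping insns never satisfies
   can_throw_external above, so it is marked nothrow here; its callers'
   can_throw_external edges are cleared, which lets them omit EH
   regions around calls to it.  */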
2030
2031 namespace {
2032
2033 const pass_data pass_data_set_nothrow_function_flags =
2034 {
2035 RTL_PASS, /* type */
2036 "nothrow", /* name */
2037 OPTGROUP_NONE, /* optinfo_flags */
2038 TV_NONE, /* tv_id */
2039 0, /* properties_required */
2040 0, /* properties_provided */
2041 0, /* properties_destroyed */
2042 0, /* todo_flags_start */
2043 0, /* todo_flags_finish */
2044 };
2045
2046 class pass_set_nothrow_function_flags : public rtl_opt_pass
2047 {
2048 public:
2049 pass_set_nothrow_function_flags (gcc::context *ctxt)
2050 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2051 {}
2052
2053 /* opt_pass methods: */
2054 virtual unsigned int execute (function *)
2055 {
2056 return set_nothrow_function_flags ();
2057 }
2058
2059 }; // class pass_set_nothrow_function_flags
2060
2061 } // anon namespace
2062
2063 rtl_opt_pass *
2064 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2065 {
2066 return new pass_set_nothrow_function_flags (ctxt);
2067 }
2068
2069 \f
2070 /* Various hooks for unwind library. */
2071
2072 /* Expand the EH support builtin functions:
2073 __builtin_eh_pointer and __builtin_eh_filter. */
2074
2075 static eh_region
2076 expand_builtin_eh_common (tree region_nr_t)
2077 {
2078 HOST_WIDE_INT region_nr;
2079 eh_region region;
2080
2081 gcc_assert (tree_fits_shwi_p (region_nr_t));
2082 region_nr = tree_to_shwi (region_nr_t);
2083
2084 region = (*cfun->eh->region_array)[region_nr];
2085
2086 /* ??? We shouldn't have been able to delete an eh region without
2087 deleting all the code that depended on it. */
2088 gcc_assert (region != NULL);
2089
2090 return region;
2091 }
2092
2093 /* Expand to the exc_ptr value from the given eh region. */
2094
2095 rtx
2096 expand_builtin_eh_pointer (tree exp)
2097 {
2098 eh_region region
2099 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2100 if (region->exc_ptr_reg == NULL)
2101 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2102 return region->exc_ptr_reg;
2103 }
2104
2105 /* Expand to the filter value from the given eh region. */
2106
2107 rtx
2108 expand_builtin_eh_filter (tree exp)
2109 {
2110 eh_region region
2111 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2112 if (region->filter_reg == NULL)
2113 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2114 return region->filter_reg;
2115 }
2116
2117 /* Copy the exc_ptr and filter values from one EH region's registers
2118 to another. This is used when inlining a resx statement. */
2119
2120 rtx
2121 expand_builtin_eh_copy_values (tree exp)
2122 {
2123 eh_region dst
2124 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2125 eh_region src
2126 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2127 machine_mode fmode = targetm.eh_return_filter_mode ();
2128
2129 if (dst->exc_ptr_reg == NULL)
2130 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2131 if (src->exc_ptr_reg == NULL)
2132 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2133
2134 if (dst->filter_reg == NULL)
2135 dst->filter_reg = gen_reg_rtx (fmode);
2136 if (src->filter_reg == NULL)
2137 src->filter_reg = gen_reg_rtx (fmode);
2138
2139 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2140 emit_move_insn (dst->filter_reg, src->filter_reg);
2141
2142 return const0_rtx;
2143 }
2144
2145 /* Do any necessary initialization to access arbitrary stack frames.
2146 On the SPARC, this means flushing the register windows. */
2147
2148 void
2149 expand_builtin_unwind_init (void)
2150 {
2151 /* Set this so all the registers get saved in our frame; we need to be
2152 able to copy the saved values for any registers from frames we unwind. */
2153 crtl->saves_all_registers = 1;
2154
2155 #ifdef SETUP_FRAME_ADDRESSES
2156 SETUP_FRAME_ADDRESSES ();
2157 #endif
2158 }
2159
2160 /* Map a non-negative number to an eh return data register number; expands
2161 to -1 if no return data register is associated with the input number.
2162 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2163
2164 rtx
2165 expand_builtin_eh_return_data_regno (tree exp)
2166 {
2167 tree which = CALL_EXPR_ARG (exp, 0);
2168 unsigned HOST_WIDE_INT iwhich;
2169
2170 if (TREE_CODE (which) != INTEGER_CST)
2171 {
2172 error ("argument of %<__builtin_eh_return_data_regno%> must be constant");
2173 return constm1_rtx;
2174 }
2175
2176 iwhich = tree_to_uhwi (which);
2177 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2178 if (iwhich == INVALID_REGNUM)
2179 return constm1_rtx;
2180
2181 #ifdef DWARF_FRAME_REGNUM
2182 iwhich = DWARF_FRAME_REGNUM (iwhich);
2183 #else
2184 iwhich = DBX_REGISTER_NUMBER (iwhich);
2185 #endif
2186
2187 return GEN_INT (iwhich);
2188 }
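/* Illustrative use (hypothetical): a runtime's landing-pad support
   code can ask for the DWARF number of the first EH data register
   with

       int r0 = __builtin_eh_return_data_regno (0);

   which expands via the code above to a constant, or to -1 if the
   target provides no such register.  */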
2189
2190 /* Given a value extracted from the return address register or stack slot,
2191 return the actual address encoded in that value. */
2192
2193 rtx
2194 expand_builtin_extract_return_addr (tree addr_tree)
2195 {
2196 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2197
2198 if (GET_MODE (addr) != Pmode
2199 && GET_MODE (addr) != VOIDmode)
2200 {
2201 #ifdef POINTERS_EXTEND_UNSIGNED
2202 addr = convert_memory_address (Pmode, addr);
2203 #else
2204 addr = convert_to_mode (Pmode, addr, 0);
2205 #endif
2206 }
2207
2208 /* First mask out any unwanted bits. */
2209 rtx mask = MASK_RETURN_ADDR;
2210 if (mask)
2211 expand_and (Pmode, addr, mask, addr);
2212
2213 /* Then adjust to find the real return address. */
2214 if (RETURN_ADDR_OFFSET)
2215 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2216
2217 return addr;
2218 }
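/* Illustrative use (from the builtin's documented pairing, not from
   this file): user code normally applies this to the raw value of
   __builtin_return_address, e.g.

       void *pc = __builtin_extract_return_addr (__builtin_return_address (0));

   so that any signature or offset bits added by the target are
   removed.  */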
2219
2220 /* Given an actual address in addr_tree, do any necessary encoding
2221 and return the value to be stored in the return address register or
2222 stack slot so the epilogue will return to that address. */
2223
2224 rtx
2225 expand_builtin_frob_return_addr (tree addr_tree)
2226 {
2227 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2228
2229 addr = convert_memory_address (Pmode, addr);
2230
2231 if (RETURN_ADDR_OFFSET)
2232 {
2233 addr = force_reg (Pmode, addr);
2234 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2235 }
2236
2237 return addr;
2238 }
2239
2240 /* Set up the epilogue with the magic bits we'll need to return to the
2241 exception handler. */
2242
2243 void
2244 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2245 tree handler_tree)
2246 {
2247 rtx tmp;
2248
2249 #ifdef EH_RETURN_STACKADJ_RTX
2250 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2251 VOIDmode, EXPAND_NORMAL);
2252 tmp = convert_memory_address (Pmode, tmp);
2253 if (!crtl->eh.ehr_stackadj)
2254 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2255 else if (tmp != crtl->eh.ehr_stackadj)
2256 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2257 #endif
2258
2259 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2260 VOIDmode, EXPAND_NORMAL);
2261 tmp = convert_memory_address (Pmode, tmp);
2262 if (!crtl->eh.ehr_handler)
2263 crtl->eh.ehr_handler = copy_to_reg (tmp);
2264 else if (tmp != crtl->eh.ehr_handler)
2265 emit_move_insn (crtl->eh.ehr_handler, tmp);
2266
2267 if (!crtl->eh.ehr_label)
2268 crtl->eh.ehr_label = gen_label_rtx ();
2269 emit_jump (crtl->eh.ehr_label);
2270 }
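/* Illustrative use (hypothetical): an unwinder's context-install
   routine ends roughly with

       __builtin_eh_return (offset, handler);

   where OFFSET is the computed stack adjustment and HANDLER is the
   landing-pad address; the expansion above queues both values and
   jumps to the ehr_label consumed by expand_eh_return below.  */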
2271
2272 /* Expand __builtin_eh_return. This exit path from the function loads up
2273 the eh return data registers, adjusts the stack, and branches to a
2274 given PC other than the normal return address. */
2275
2276 void
2277 expand_eh_return (void)
2278 {
2279 rtx_code_label *around_label;
2280
2281 if (! crtl->eh.ehr_label)
2282 return;
2283
2284 crtl->calls_eh_return = 1;
2285
2286 #ifdef EH_RETURN_STACKADJ_RTX
2287 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2288 #endif
2289
2290 around_label = gen_label_rtx ();
2291 emit_jump (around_label);
2292
2293 emit_label (crtl->eh.ehr_label);
2294 clobber_return_register ();
2295
2296 #ifdef EH_RETURN_STACKADJ_RTX
2297 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2298 #endif
2299
2300 #ifdef HAVE_eh_return
2301 if (HAVE_eh_return)
2302 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2303 else
2304 #endif
2305 {
2306 #ifdef EH_RETURN_HANDLER_RTX
2307 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2308 #else
2309 error ("__builtin_eh_return not supported on this target");
2310 #endif
2311 }
2312
2313 emit_label (around_label);
2314 }
2315
2316 /* Extend a ptr_mode address ADDR_TREE to the unwind word mode, with
2317 the extension controlled by POINTERS_EXTEND_UNSIGNED, and return it. */
2318
2319 rtx
2320 expand_builtin_extend_pointer (tree addr_tree)
2321 {
2322 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2323 int extend;
2324
2325 #ifdef POINTERS_EXTEND_UNSIGNED
2326 extend = POINTERS_EXTEND_UNSIGNED;
2327 #else
2328 /* The previous EH code did an unsigned extend by default, so we do
2329 the same for consistency. */
2330 extend = 1;
2331 #endif
2332
2333 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2334 }
2335 \f
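/* Add an action record with filter value FILTER chained to NEXT to
   AR_HASH, reusing an existing identical record if one is present.
   Return the record's 1-based offset in crtl->eh.action_record_data.  */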
2336 static int
2337 add_action_record (action_hash_type *ar_hash, int filter, int next)
2338 {
2339 struct action_record **slot, *new_ar, tmp;
2340
2341 tmp.filter = filter;
2342 tmp.next = next;
2343 slot = ar_hash->find_slot (&tmp, INSERT);
2344
2345 if ((new_ar = *slot) == NULL)
2346 {
2347 new_ar = XNEW (struct action_record);
2348 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2349 new_ar->filter = filter;
2350 new_ar->next = next;
2351 *slot = new_ar;
2352
2353 /* The filter value goes in untouched. The link to the next
2354 record is a "self-relative" byte offset, or zero to indicate
2355 that there is no next record. So convert the absolute 1-based
2356 indices we've been carrying around into a displacement. */
2357
2358 push_sleb128 (&crtl->eh.action_record_data, filter);
2359 if (next)
2360 next -= crtl->eh.action_record_data->length () + 1;
2361 push_sleb128 (&crtl->eh.action_record_data, next);
2362 }
2363
2364 return new_ar->offset;
2365 }
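/* Worked example (illustration only, assuming each value encodes in a
   single sleb128 byte): the first record A lands at 1-based offset 1
   with bytes { filter_A, 0 }.  A record B chained to A is then placed
   at offset 3; when B's link is pushed the table already holds 3
   bytes, so the stored displacement is 1 - (3 + 1) = -3, i.e. "three
   bytes back" from just past B's filter byte.  */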
2366
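/* Build the chain of action records covering REGION and everything
   outer to it.  Return -1 if no actions (and no landing pad) are
   required, -2 for a bare must-not-throw region, 0 for a chain of
   cleanups only, or the positive 1-based offset of the chain's first
   action record.  */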
2367 static int
2368 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2369 {
2370 int next;
2371
2372 /* If we've reached the top of the region chain, then we have
2373 no actions, and require no landing pad. */
2374 if (region == NULL)
2375 return -1;
2376
2377 switch (region->type)
2378 {
2379 case ERT_CLEANUP:
2380 {
2381 eh_region r;
2382 /* A cleanup adds a zero filter to the beginning of the chain, but
2383 there are special cases to look out for. If there are *only*
2384 cleanups along a path, then it compresses to a zero action.
2385 Further, if there are multiple cleanups along a path, we only
2386 need to represent one of them, as that is enough to trigger
2387 entry to the landing pad at runtime. */
2388 next = collect_one_action_chain (ar_hash, region->outer);
2389 if (next <= 0)
2390 return 0;
2391 for (r = region->outer; r ; r = r->outer)
2392 if (r->type == ERT_CLEANUP)
2393 return next;
2394 return add_action_record (ar_hash, 0, next);
2395 }
2396
2397 case ERT_TRY:
2398 {
2399 eh_catch c;
2400
2401 /* Process the associated catch regions in reverse order.
2402 If there's a catch-all handler, then we don't need to
2403 search outer regions. Use a magic -3 value to record
2404 that we haven't done the outer search. */
2405 next = -3;
2406 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2407 {
2408 if (c->type_list == NULL)
2409 {
2410 /* Retrieve the filter from the head of the filter list
2411 where we have stored it (see assign_filter_values). */
2412 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2413 next = add_action_record (ar_hash, filter, 0);
2414 }
2415 else
2416 {
2417 /* Once the outer search is done, trigger an action record for
2418 each filter we have. */
2419 tree flt_node;
2420
2421 if (next == -3)
2422 {
2423 next = collect_one_action_chain (ar_hash, region->outer);
2424
2425 /* If there is no next action, terminate the chain. */
2426 if (next == -1)
2427 next = 0;
2428 /* If all outer actions are cleanups or must_not_throw,
2429 there is no action record for them, since those states
2430 are encoded directly in the call-site record. Add a
2431 cleanup action to the chain so they are still caught. */
2432 else if (next <= 0)
2433 next = add_action_record (ar_hash, 0, 0);
2434 }
2435
2436 flt_node = c->filter_list;
2437 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2438 {
2439 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2440 next = add_action_record (ar_hash, filter, next);
2441 }
2442 }
2443 }
2444 return next;
2445 }
2446
2447 case ERT_ALLOWED_EXCEPTIONS:
2448 /* An exception specification adds its filter to the
2449 beginning of the chain. */
2450 next = collect_one_action_chain (ar_hash, region->outer);
2451
2452 /* If there is no next action, terminate the chain. */
2453 if (next == -1)
2454 next = 0;
2455 /* If all outer actions are cleanups or must_not_throw,
2456 there is no action record for them, since those states
2457 are encoded directly in the call-site record. Add a
2458 cleanup action to the chain so they are still caught. */
2459 else if (next <= 0)
2460 next = add_action_record (ar_hash, 0, 0);
2461
2462 return add_action_record (ar_hash, region->u.allowed.filter, next);
2463
2464 case ERT_MUST_NOT_THROW:
2465 /* A must-not-throw region with no inner handlers or cleanups
2466 requires no call-site entry. Note that this differs from
2467 the no handler or cleanup case in that we do require an lsda
2468 to be generated. Return a magic -2 value to record this. */
2469 return -2;
2470 }
2471
2472 gcc_unreachable ();
2473 }
2474
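/* Append a call-site record with the given LANDING_PAD and ACTION to
   the table for SECTION; return its index, biased by call_site_base.  */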
2475 static int
2476 add_call_site (rtx landing_pad, int action, int section)
2477 {
2478 call_site_record record;
2479
2480 record = ggc_alloc<call_site_record_d> ();
2481 record->landing_pad = landing_pad;
2482 record->action = action;
2483
2484 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2485
2486 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2487 }
2488
2489 static rtx_note *
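/* Emit a NOTE_INSN_EH_REGION_END note after INSN, stepping over any
   CALL_ARG_LOCATION note that must stay with a preceding call.  */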
2490 emit_note_eh_region_end (rtx_insn *insn)
2491 {
2492 rtx_insn *next = NEXT_INSN (insn);
2493
2494 /* Make sure we do not split a call and its corresponding
2495 CALL_ARG_LOCATION note. */
2496 if (next && NOTE_P (next)
2497 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2498 insn = next;
2499
2500 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2501 }
2502
2503 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2504 The new note numbers will not refer to region numbers, but
2505 instead to call site entries. */
2506
2507 static unsigned int
2508 convert_to_eh_region_ranges (void)
2509 {
2510 rtx insn;
2511 rtx_insn *iter;
2512 rtx_note *note;
2513 action_hash_type ar_hash (31);
2514 int last_action = -3;
2515 rtx_insn *last_action_insn = NULL;
2516 rtx last_landing_pad = NULL_RTX;
2517 rtx_insn *first_no_action_insn = NULL;
2518 int call_site = 0;
2519 int cur_sec = 0;
2520 rtx_insn *section_switch_note = NULL;
2521 rtx_insn *first_no_action_insn_before_switch = NULL;
2522 rtx_insn *last_no_action_insn_before_switch = NULL;
2523 int saved_call_site_base = call_site_base;
2524
2525 vec_alloc (crtl->eh.action_record_data, 64);
2526
2527 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2528 if (INSN_P (iter))
2529 {
2530 eh_landing_pad lp;
2531 eh_region region;
2532 bool nothrow;
2533 int this_action;
2534 rtx_code_label *this_landing_pad;
2535
2536 insn = iter;
2537 if (NONJUMP_INSN_P (insn)
2538 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2539 insn = XVECEXP (PATTERN (insn), 0, 0);
2540
2541 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2542 if (nothrow)
2543 continue;
2544 if (region)
2545 this_action = collect_one_action_chain (&ar_hash, region);
2546 else
2547 this_action = -1;
2548
2549 /* The existence of catch handlers or must-not-throw regions
2550 implies that an lsda is needed (even if empty). */
2551 if (this_action != -1)
2552 crtl->uses_eh_lsda = 1;
2553
2554 /* Delay creation of region notes for no-action regions
2555 until we're sure that an lsda will be required. */
2556 else if (last_action == -3)
2557 {
2558 first_no_action_insn = iter;
2559 last_action = -1;
2560 }
2561
2562 if (this_action >= 0)
2563 this_landing_pad = lp->landing_pad;
2564 else
2565 this_landing_pad = NULL;
2566
2567 /* Differing actions or landing pads implies a change in call-site
2568 info, which implies some EH_REGION note should be emitted. */
2569 if (last_action != this_action
2570 || last_landing_pad != this_landing_pad)
2571 {
2572 /* If there is a queued no-action region in the other section
2573 with hot/cold partitioning, emit it now. */
2574 if (first_no_action_insn_before_switch)
2575 {
2576 gcc_assert (this_action != -1
2577 && last_action == (first_no_action_insn
2578 ? -1 : -3));
2579 call_site = add_call_site (NULL_RTX, 0, 0);
2580 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2581 first_no_action_insn_before_switch);
2582 NOTE_EH_HANDLER (note) = call_site;
2583 note
2584 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2585 NOTE_EH_HANDLER (note) = call_site;
2586 gcc_assert (last_action != -3
2587 || (last_action_insn
2588 == last_no_action_insn_before_switch));
2589 first_no_action_insn_before_switch = NULL;
2590 last_no_action_insn_before_switch = NULL;
2591 call_site_base++;
2592 }
2593 /* If we'd not seen a previous action (-3) or the previous
2594 action was must-not-throw (-2), then we do not need an
2595 end note. */
2596 if (last_action >= -1)
2597 {
2598 /* If we delayed the creation of the begin, do it now. */
2599 if (first_no_action_insn)
2600 {
2601 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2602 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2603 first_no_action_insn);
2604 NOTE_EH_HANDLER (note) = call_site;
2605 first_no_action_insn = NULL;
2606 }
2607
2608 note = emit_note_eh_region_end (last_action_insn);
2609 NOTE_EH_HANDLER (note) = call_site;
2610 }
2611
2612 /* If the new action is must-not-throw, then no region notes
2613 are created. */
2614 if (this_action >= -1)
2615 {
2616 call_site = add_call_site (this_landing_pad,
2617 this_action < 0 ? 0 : this_action,
2618 cur_sec);
2619 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2620 NOTE_EH_HANDLER (note) = call_site;
2621 }
2622
2623 last_action = this_action;
2624 last_landing_pad = this_landing_pad;
2625 }
2626 last_action_insn = iter;
2627 }
2628 else if (NOTE_P (iter)
2629 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2630 {
2631 gcc_assert (section_switch_note == NULL_RTX);
2632 gcc_assert (flag_reorder_blocks_and_partition);
2633 section_switch_note = iter;
2634 if (first_no_action_insn)
2635 {
2636 first_no_action_insn_before_switch = first_no_action_insn;
2637 last_no_action_insn_before_switch = last_action_insn;
2638 first_no_action_insn = NULL;
2639 gcc_assert (last_action == -1);
2640 last_action = -3;
2641 }
2642 /* Force closing of current EH region before section switch and
2643 opening a new one afterwards. */
2644 else if (last_action != -3)
2645 last_landing_pad = pc_rtx;
2646 if (crtl->eh.call_site_record_v[cur_sec])
2647 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2648 cur_sec++;
2649 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2650 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2651 }
2652
2653 if (last_action >= -1 && ! first_no_action_insn)
2654 {
2655 note = emit_note_eh_region_end (last_action_insn);
2656 NOTE_EH_HANDLER (note) = call_site;
2657 }
2658
2659 call_site_base = saved_call_site_base;
2660
2661 return 0;
2662 }
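/* For illustration only: after this pass, a throwing call in the insn
   stream is bracketed roughly as

       NOTE_INSN_EH_REGION_BEG 3
       (call_insn ...)
       NOTE_INSN_EH_REGION_END 3

   where 3 is the index of a call-site record created by add_call_site,
   not an EH region number.  */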
2663
2664 namespace {
2665
2666 const pass_data pass_data_convert_to_eh_region_ranges =
2667 {
2668 RTL_PASS, /* type */
2669 "eh_ranges", /* name */
2670 OPTGROUP_NONE, /* optinfo_flags */
2671 TV_NONE, /* tv_id */
2672 0, /* properties_required */
2673 0, /* properties_provided */
2674 0, /* properties_destroyed */
2675 0, /* todo_flags_start */
2676 0, /* todo_flags_finish */
2677 };
2678
2679 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2680 {
2681 public:
2682 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2683 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2684 {}
2685
2686 /* opt_pass methods: */
2687 virtual bool gate (function *);
2688 virtual unsigned int execute (function *)
2689 {
2690 return convert_to_eh_region_ranges ();
2691 }
2692
2693 }; // class pass_convert_to_eh_region_ranges
2694
2695 bool
2696 pass_convert_to_eh_region_ranges::gate (function *)
2697 {
2698 /* Nothing to do for SJLJ exceptions or if no regions created. */
2699 if (cfun->eh->region_tree == NULL)
2700 return false;
2701 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2702 return false;
2703 return true;
2704 }
2705
2706 } // anon namespace
2707
2708 rtl_opt_pass *
2709 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2710 {
2711 return new pass_convert_to_eh_region_ranges (ctxt);
2712 }
2713 \f
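/* Append VALUE to *DATA_AREA as an unsigned LEB128 byte sequence.  */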
2714 static void
2715 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2716 {
2717 do
2718 {
2719 unsigned char byte = value & 0x7f;
2720 value >>= 7;
2721 if (value)
2722 byte |= 0x80;
2723 vec_safe_push (*data_area, byte);
2724 }
2725 while (value);
2726 }
2727
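/* Likewise, but encode VALUE as a signed LEB128.  */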
2728 static void
2729 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2730 {
2731 unsigned char byte;
2732 int more;
2733
2734 do
2735 {
2736 byte = value & 0x7f;
2737 value >>= 7;
2738 more = ! ((value == 0 && (byte & 0x40) == 0)
2739 || (value == -1 && (byte & 0x40) != 0));
2740 if (more)
2741 byte |= 0x80;
2742 vec_safe_push (*data_area, byte);
2743 }
2744 while (more);
2745 }
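/* Worked example (DWARF's standard one): push_uleb128 encodes 624485
   as the bytes 0xe5 0x8e 0x26 -- each byte carries seven payload bits,
   least significant group first, with the high bit marking "more bytes
   follow".  push_sleb128 encodes -2 as the single byte 0x7e, since the
   sign bit (0x40) of that byte already reproduces the value's sign.  */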
2746
2747 \f
2748 #ifndef HAVE_AS_LEB128
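/* Return the size in bytes of the DWARF2 call-site table for SECTION.  */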
2749 static int
2750 dw2_size_of_call_site_table (int section)
2751 {
2752 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2753 int size = n * (4 + 4 + 4);
2754 int i;
2755
2756 for (i = 0; i < n; ++i)
2757 {
2758 struct call_site_record_d *cs =
2759 (*crtl->eh.call_site_record_v[section])[i];
2760 size += size_of_uleb128 (cs->action);
2761 }
2762
2763 return size;
2764 }
2765
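/* Likewise for the SJLJ call-site table.  */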
2766 static int
2767 sjlj_size_of_call_site_table (void)
2768 {
2769 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2770 int size = 0;
2771 int i;
2772
2773 for (i = 0; i < n; ++i)
2774 {
2775 struct call_site_record_d *cs =
2776 (*crtl->eh.call_site_record_v[0])[i];
2777 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2778 size += size_of_uleb128 (cs->action);
2779 }
2780
2781 return size;
2782 }
2783 #endif
2784
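/* Output the call-site table for SECTION, encoding offsets with
   CS_FORMAT.  */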
2785 static void
2786 dw2_output_call_site_table (int cs_format, int section)
2787 {
2788 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2789 int i;
2790 const char *begin;
2791
2792 if (section == 0)
2793 begin = current_function_func_begin_label;
2794 else if (first_function_block_is_cold)
2795 begin = crtl->subsections.hot_section_label;
2796 else
2797 begin = crtl->subsections.cold_section_label;
2798
2799 for (i = 0; i < n; ++i)
2800 {
2801 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2802 char reg_start_lab[32];
2803 char reg_end_lab[32];
2804 char landing_pad_lab[32];
2805
2806 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2807 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2808
2809 if (cs->landing_pad)
2810 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2811 CODE_LABEL_NUMBER (cs->landing_pad));
2812
2813 /* ??? Perhaps use insn length scaling if the assembler supports
2814 generic arithmetic. */
2815 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2816 data4 if the function is small enough. */
2817 if (cs_format == DW_EH_PE_uleb128)
2818 {
2819 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2820 "region %d start", i);
2821 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2822 "length");
2823 if (cs->landing_pad)
2824 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2825 "landing pad");
2826 else
2827 dw2_asm_output_data_uleb128 (0, "landing pad");
2828 }
2829 else
2830 {
2831 dw2_asm_output_delta (4, reg_start_lab, begin,
2832 "region %d start", i);
2833 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2834 if (cs->landing_pad)
2835 dw2_asm_output_delta (4, landing_pad_lab, begin,
2836 "landing pad");
2837 else
2838 dw2_asm_output_data (4, 0, "landing pad");
2839 }
2840 dw2_asm_output_data_uleb128 (cs->action, "action");
2841 }
2842
2843 call_site_base += n;
2844 }
2845
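/* Output the call-site table used for SJLJ exceptions.  */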
2846 static void
2847 sjlj_output_call_site_table (void)
2848 {
2849 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2850 int i;
2851
2852 for (i = 0; i < n; ++i)
2853 {
2854 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2855
2856 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2857 "region %d landing pad", i);
2858 dw2_asm_output_data_uleb128 (cs->action, "action");
2859 }
2860
2861 call_site_base += n;
2862 }
2863
2864 /* Switch to the section that should be used for exception tables. */
2865
2866 static void
2867 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2868 {
2869 section *s;
2870
2871 if (exception_section)
2872 s = exception_section;
2873 else
2874 {
2875 /* Compute the section and cache it into exception_section,
2876 unless it depends on the function name. */
2877 if (targetm_common.have_named_sections)
2878 {
2879 int flags;
2880
2881 if (EH_TABLES_CAN_BE_READ_ONLY)
2882 {
2883 int tt_format =
2884 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2885 flags = ((! flag_pic
2886 || ((tt_format & 0x70) != DW_EH_PE_absptr
2887 && (tt_format & 0x70) != DW_EH_PE_aligned))
2888 ? 0 : SECTION_WRITE);
2889 }
2890 else
2891 flags = SECTION_WRITE;
2892
2893 #ifdef HAVE_LD_EH_GC_SECTIONS
2894 if (flag_function_sections
2895 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2896 {
2897 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2898 /* The EH table must match the code section, so only mark
2899 it linkonce if we have COMDAT groups to tie them together. */
2900 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2901 flags |= SECTION_LINKONCE;
2902 sprintf (section_name, ".gcc_except_table.%s", fnname);
2903 s = get_section (section_name, flags, current_function_decl);
2904 free (section_name);
2905 }
2906 else
2907 #endif
2908 exception_section
2909 = s = get_section (".gcc_except_table", flags, NULL);
2910 }
2911 else
2912 exception_section
2913 = s = flag_pic ? data_section : readonly_data_section;
2914 }
2915
2916 switch_to_section (s);
2917 }
2918
2919
2920 /* Output a reference from an exception table to the type_info object TYPE.
2921 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2922 the value. */
2923
2924 static void
2925 output_ttype (tree type, int tt_format, int tt_format_size)
2926 {
2927 rtx value;
2928 bool is_public = true;
2929
2930 if (type == NULL_TREE)
2931 value = const0_rtx;
2932 else
2933 {
2934 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2935 runtime types so TYPE should already be a runtime type
2936 reference. When pass_ipa_free_lang_data is made a default
2937 pass, we can then remove the call to lookup_type_for_runtime
2938 below. */
2939 if (TYPE_P (type))
2940 type = lookup_type_for_runtime (type);
2941
2942 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2943
2944 /* Let cgraph know that the rtti decl is used. Not all of the
2945 paths below go through assemble_integer, which would take
2946 care of this for us. */
2947 STRIP_NOPS (type);
2948 if (TREE_CODE (type) == ADDR_EXPR)
2949 {
2950 type = TREE_OPERAND (type, 0);
2951 if (TREE_CODE (type) == VAR_DECL)
2952 is_public = TREE_PUBLIC (type);
2953 }
2954 else
2955 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2956 }
2957
2958 /* Allow the target to override the type table entry format. */
2959 if (targetm.asm_out.ttype (value))
2960 return;
2961
2962 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2963 assemble_integer (value, tt_format_size,
2964 tt_format_size * BITS_PER_UNIT, 1);
2965 else
2966 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2967 }
2968
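/* Output the LSDA for one SECTION (0 for hot, 1 for cold) of the
   current function.  */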
2969 static void
2970 output_one_function_exception_table (int section)
2971 {
2972 int tt_format, cs_format, lp_format, i;
2973 #ifdef HAVE_AS_LEB128
2974 char ttype_label[32];
2975 char cs_after_size_label[32];
2976 char cs_end_label[32];
2977 #else
2978 int call_site_len;
2979 #endif
2980 int have_tt_data;
2981 int tt_format_size = 0;
2982
2983 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2984 || (targetm.arm_eabi_unwinder
2985 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2986 : vec_safe_length (cfun->eh->ehspec_data.other)));
2987
2988 /* Indicate the format of the @TType entries. */
2989 if (! have_tt_data)
2990 tt_format = DW_EH_PE_omit;
2991 else
2992 {
2993 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2994 #ifdef HAVE_AS_LEB128
2995 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2996 section ? "LLSDATTC" : "LLSDATT",
2997 current_function_funcdef_no);
2998 #endif
2999 tt_format_size = size_of_encoded_value (tt_format);
3000
3001 assemble_align (tt_format_size * BITS_PER_UNIT);
3002 }
3003
3004 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3005 current_function_funcdef_no);
3006
3007 /* The LSDA header. */
3008
3009 /* Indicate the format of the landing pad start pointer. An omitted
3010 field implies @LPStart == @Start. */
3011 /* Currently we always put @LPStart == @Start. This field would
3012 be most useful in moving the landing pads completely out of
3013 line to another section, but it could also be used to minimize
3014 the size of uleb128 landing pad offsets. */
3015 lp_format = DW_EH_PE_omit;
3016 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3017 eh_data_format_name (lp_format));
3018
3019 /* @LPStart pointer would go here. */
3020
3021 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3022 eh_data_format_name (tt_format));
3023
3024 #ifndef HAVE_AS_LEB128
3025 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3026 call_site_len = sjlj_size_of_call_site_table ();
3027 else
3028 call_site_len = dw2_size_of_call_site_table (section);
3029 #endif
3030
3031 /* A pc-relative 4-byte displacement to the @TType data. */
3032 if (have_tt_data)
3033 {
3034 #ifdef HAVE_AS_LEB128
3035 char ttype_after_disp_label[32];
3036 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3037 section ? "LLSDATTDC" : "LLSDATTD",
3038 current_function_funcdef_no);
3039 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3040 "@TType base offset");
3041 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3042 #else
3043 /* Ugh. The uleb128 size of DISP affects the padding, so iterate to a fixed point. */
3044 unsigned int before_disp, after_disp, last_disp, disp;
3045
3046 before_disp = 1 + 1;
3047 after_disp = (1 + size_of_uleb128 (call_site_len)
3048 + call_site_len
3049 + vec_safe_length (crtl->eh.action_record_data)
3050 + (vec_safe_length (cfun->eh->ttype_data)
3051 * tt_format_size));
3052
3053 disp = after_disp;
3054 do
3055 {
3056 unsigned int disp_size, pad;
3057
3058 last_disp = disp;
3059 disp_size = size_of_uleb128 (disp);
3060 pad = before_disp + disp_size + after_disp;
3061 if (pad % tt_format_size)
3062 pad = tt_format_size - (pad % tt_format_size);
3063 else
3064 pad = 0;
3065 disp = after_disp + pad;
3066 }
3067 while (disp != last_disp);
3068
3069 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3070 #endif
3071 }
3072
3073 /* Indicate the format of the call-site offsets. */
3074 #ifdef HAVE_AS_LEB128
3075 cs_format = DW_EH_PE_uleb128;
3076 #else
3077 cs_format = DW_EH_PE_udata4;
3078 #endif
3079 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3080 eh_data_format_name (cs_format));
3081
3082 #ifdef HAVE_AS_LEB128
3083 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3084 section ? "LLSDACSBC" : "LLSDACSB",
3085 current_function_funcdef_no);
3086 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3087 section ? "LLSDACSEC" : "LLSDACSE",
3088 current_function_funcdef_no);
3089 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3090 "Call-site table length");
3091 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3092 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3093 sjlj_output_call_site_table ();
3094 else
3095 dw2_output_call_site_table (cs_format, section);
3096 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3097 #else
3098 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3099 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3100 sjlj_output_call_site_table ();
3101 else
3102 dw2_output_call_site_table (cs_format, section);
3103 #endif
3104
3105 /* ??? Decode and interpret the data for flag_debug_asm. */
3106 {
3107 uchar uc;
3108 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3109 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3110 }
3111
3112 if (have_tt_data)
3113 assemble_align (tt_format_size * BITS_PER_UNIT);
3114
3115 i = vec_safe_length (cfun->eh->ttype_data);
3116 while (i-- > 0)
3117 {
3118 tree type = (*cfun->eh->ttype_data)[i];
3119 output_ttype (type, tt_format, tt_format_size);
3120 }
3121
3122 #ifdef HAVE_AS_LEB128
3123 if (have_tt_data)
3124 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3125 #endif
3126
3127 /* ??? Decode and interpret the data for flag_debug_asm. */
3128 if (targetm.arm_eabi_unwinder)
3129 {
3130 tree type;
3131 for (i = 0;
3132 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3133 output_ttype (type, tt_format, tt_format_size);
3134 }
3135 else
3136 {
3137 uchar uc;
3138 for (i = 0;
3139 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3140 dw2_asm_output_data (1, uc,
3141 i ? NULL : "Exception specification table");
3142 }
3143 }
3144
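/* Output the exception table(s) for the function named FNNAME, if it
   needs any.  */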
3145 void
3146 output_function_exception_table (const char *fnname)
3147 {
3148 rtx personality = get_personality_function (current_function_decl);
3149
3150 /* Not every function needs an exception table. */
3151 if (! crtl->uses_eh_lsda)
3152 return;
3153
3154 if (personality)
3155 {
3156 assemble_external_libcall (personality);
3157
3158 if (targetm.asm_out.emit_except_personality)
3159 targetm.asm_out.emit_except_personality (personality);
3160 }
3161
3162 switch_to_exception_section (fnname);
3163
3164 /* If the target wants a label to begin the table, emit it here. */
3165 targetm.asm_out.emit_except_table_label (asm_out_file);
3166
3167 output_one_function_exception_table (0);
3168 if (crtl->eh.call_site_record_v[1])
3169 output_one_function_exception_table (1);
3170
3171 switch_to_section (current_function_section ());
3172 }
3173
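/* Set FUN's throw statement table to TABLE.  */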
3174 void
3175 set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
3176 {
3177 fun->eh->throw_stmt_table = table;
3178 }
3179
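/* Return FUN's throw statement table.  */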
3180 hash_map<gimple, int> *
3181 get_eh_throw_stmt_table (struct function *fun)
3182 {
3183 return fun->eh->throw_stmt_table;
3184 }
3185 \f
3186 /* Determine if the function needs an EH personality function. */
3187
3188 enum eh_personality_kind
3189 function_needs_eh_personality (struct function *fn)
3190 {
3191 enum eh_personality_kind kind = eh_personality_none;
3192 eh_region i;
3193
3194 FOR_ALL_EH_REGION_FN (i, fn)
3195 {
3196 switch (i->type)
3197 {
3198 case ERT_CLEANUP:
3199 /* Can do with any personality including the generic C one. */
3200 kind = eh_personality_any;
3201 break;
3202
3203 case ERT_TRY:
3204 case ERT_ALLOWED_EXCEPTIONS:
3205 /* Always needs an EH personality function. The generic C
3206 personality doesn't handle these even for empty type lists. */
3207 return eh_personality_lang;
3208
3209 case ERT_MUST_NOT_THROW:
3210 /* Always needs an EH personality function. The language may specify
3211 which abort routine must be used, e.g. std::terminate. */
3212 return eh_personality_lang;
3213 }
3214 }
3215
3216 return kind;
3217 }
3218 \f
3219 /* Dump EH information to OUT. */
3220
3221 void
3222 dump_eh_tree (FILE * out, struct function *fun)
3223 {
3224 eh_region i;
3225 int depth = 0;
3226 static const char *const type_name[] = {
3227 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3228 };
3229
3230 i = fun->eh->region_tree;
3231 if (!i)
3232 return;
3233
3234 fprintf (out, "Eh tree:\n");
3235 while (1)
3236 {
3237 fprintf (out, " %*s %i %s", depth * 2, "",
3238 i->index, type_name[(int) i->type]);
3239
3240 if (i->landing_pads)
3241 {
3242 eh_landing_pad lp;
3243
3244 fprintf (out, " land:");
3245 if (current_ir_type () == IR_GIMPLE)
3246 {
3247 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3248 {
3249 fprintf (out, "{%i,", lp->index);
3250 print_generic_expr (out, lp->post_landing_pad, 0);
3251 fputc ('}', out);
3252 if (lp->next_lp)
3253 fputc (',', out);
3254 }
3255 }
3256 else
3257 {
3258 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3259 {
3260 fprintf (out, "{%i,", lp->index);
3261 if (lp->landing_pad)
3262 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3263 NOTE_P (lp->landing_pad) ? "(del)" : "");
3264 else
3265 fprintf (out, "(nil),");
3266 if (lp->post_landing_pad)
3267 {
3268 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3269 fprintf (out, "%i%s}", INSN_UID (lab),
3270 NOTE_P (lab) ? "(del)" : "");
3271 }
3272 else
3273 fprintf (out, "(nil)}");
3274 if (lp->next_lp)
3275 fputc (',', out);
3276 }
3277 }
3278 }
3279
3280 switch (i->type)
3281 {
3282 case ERT_CLEANUP:
3283 case ERT_MUST_NOT_THROW:
3284 break;
3285
3286 case ERT_TRY:
3287 {
3288 eh_catch c;
3289 fprintf (out, " catch:");
3290 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3291 {
3292 fputc ('{', out);
3293 if (c->label)
3294 {
3295 fprintf (out, "lab:");
3296 print_generic_expr (out, c->label, 0);
3297 fputc (';', out);
3298 }
3299 print_generic_expr (out, c->type_list, 0);
3300 fputc ('}', out);
3301 if (c->next_catch)
3302 fputc (',', out);
3303 }
3304 }
3305 break;
3306
3307 case ERT_ALLOWED_EXCEPTIONS:
3308 fprintf (out, " filter:%i types:", i->u.allowed.filter);
3309 print_generic_expr (out, i->u.allowed.type_list, 0);
3310 break;
3311 }
3312 fputc ('\n', out);
3313
3314 /* If there are sub-regions, process them. */
3315 if (i->inner)
3316 i = i->inner, depth++;
3317 /* If there are peers, process them. */
3318 else if (i->next_peer)
3319 i = i->next_peer;
3320 /* Otherwise, step back up the tree to the next peer. */
3321 else
3322 {
3323 do
3324 {
3325 i = i->outer;
3326 depth--;
3327 if (i == NULL)
3328 return;
3329 }
3330 while (i->next_peer == NULL);
3331 i = i->next_peer;
3332 }
3333 }
3334 }
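/* For illustration only, a hypothetical GIMPLE-level dump produced by
   the function above might read

   Eh tree:
     1 cleanup land:{1,<L1>}
       2 try land:{2,<L2>} catch:{lab:<L3>;struct E *}

   where indentation reflects nesting depth, the leading number is the
   region index, and the braces list the region's landing pads.  */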
3335
3336 /* Dump the EH tree for FN on stderr. */
3337
3338 DEBUG_FUNCTION void
3339 debug_eh_tree (struct function *fn)
3340 {
3341 dump_eh_tree (stderr, fn);
3342 }
3343
3344 /* Verify invariants on EH datastructures. */
3345
3346 DEBUG_FUNCTION void
3347 verify_eh_tree (struct function *fun)
3348 {
3349 eh_region r, outer;
3350 int nvisited_lp, nvisited_r;
3351 int count_lp, count_r, depth, i;
3352 eh_landing_pad lp;
3353 bool err = false;
3354
3355 if (!fun->eh->region_tree)
3356 return;
3357
3358 count_r = 0;
3359 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3360 if (r)
3361 {
3362 if (r->index == i)
3363 count_r++;
3364 else
3365 {
3366 error ("region_array is corrupted for region %i", r->index);
3367 err = true;
3368 }
3369 }
3370
3371 count_lp = 0;
3372 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3373 if (lp)
3374 {
3375 if (lp->index == i)
3376 count_lp++;
3377 else
3378 {
3379 error ("lp_array is corrupted for lp %i", lp->index);
3380 err = true;
3381 }
3382 }
3383
3384 depth = nvisited_lp = nvisited_r = 0;
3385 outer = NULL;
3386 r = fun->eh->region_tree;
3387 while (1)
3388 {
3389 if ((*fun->eh->region_array)[r->index] != r)
3390 {
3391 error ("region_array is corrupted for region %i", r->index);
3392 err = true;
3393 }
3394 if (r->outer != outer)
3395 {
3396 error ("outer block of region %i is wrong", r->index);
3397 err = true;
3398 }
3399 if (depth < 0)
3400 {
3401 error ("negative nesting depth of region %i", r->index);
3402 err = true;
3403 }
3404 nvisited_r++;
3405
3406 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3407 {
3408 if ((*fun->eh->lp_array)[lp->index] != lp)
3409 {
3410 error ("lp_array is corrupted for lp %i", lp->index);
3411 err = true;
3412 }
3413 if (lp->region != r)
3414 {
3415 error ("region of lp %i is wrong", lp->index);
3416 err = true;
3417 }
3418 nvisited_lp++;
3419 }
3420
3421 if (r->inner)
3422 outer = r, r = r->inner, depth++;
3423 else if (r->next_peer)
3424 r = r->next_peer;
3425 else
3426 {
3427 do
3428 {
3429 r = r->outer;
3430 if (r == NULL)
3431 goto region_done;
3432 depth--;
3433 outer = r->outer;
3434 }
3435 while (r->next_peer == NULL);
3436 r = r->next_peer;
3437 }
3438 }
3439 region_done:
3440 if (depth != 0)
3441 {
3442 error ("tree list ends on depth %i", depth);
3443 err = true;
3444 }
3445 if (count_r != nvisited_r)
3446 {
3447 error ("region_array does not match region_tree");
3448 err = true;
3449 }
3450 if (count_lp != nvisited_lp)
3451 {
3452 error ("lp_array does not match region_tree");
3453 err = true;
3454 }
3455
3456 if (err)
3457 {
3458 dump_eh_tree (stderr, fun);
3459 internal_error ("verify_eh_tree failed");
3460 }
3461 }
3462 \f
3463 #include "gt-except.h"