/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
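
/* As an illustrative sketch (not part of the original sources): with
   -fexceptions, a C function such as

       void do_cleanup (int *p);
       void may_throw (void);

       void
       f (void)
       {
         int x __attribute__ ((cleanup (do_cleanup)));
         may_throw ();
       }

   is gimplified so that the call to may_throw is protected by a
   GIMPLE_TRY_FINALLY running do_cleanup.  pass_lower_eh then records
   an ERT_CLEANUP region for it, gives that region a landing pad whose
   code invokes do_cleanup, and ends the pad with a RESX transferring
   control to the enclosing region (or, after pass_lower_resx, a call
   to __builtin_unwind_resume if no such region exists).  */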


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-flow.h"
#include "cfgloop.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void dw2_build_landing_pads (void);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
static void push_sleb128 (VEC (uchar, gc) **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
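      /* E.g., on a target where BITS_PER_WORD and POINTER_SIZE are both
         64, this is size_int (5 * 64 / 64 - 1) == size_int (4), i.e. an
         index type of 0..4 and thus room for exactly five pointers.  */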
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
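
/* For reference, a sketch of the structure being laid out above, as it
   roughly appears in unwind-sjlj.c (field types here are illustrative;
   the authoritative definition lives in the runtime):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;   // __prev
         int call_site;                        // __call_site
         _Unwind_Word data[4];                 // __data
         _Unwind_Personality_Fn personality;   // __personality
         void *lsda;                           // __lsda
         void *jbuf[];                         // __jbuf; size is
                                               // target-dependent
       };  */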

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared_eh_status ();

  /* Make sure zeroth entries are used.  */
  VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
  VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_alloc_cleared_eh_region_d ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
  VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_alloc_cleared_eh_catch_d ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
  region->landing_pads = lp;

  VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return VEC_index (eh_region, ifun->eh->region_array, i);
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return VEC_index (eh_region, ifun->eh->region_array, -i);
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree rooted
   at OLD_R into the current function, placing the copy under OUTER and
   remapping labels via DATA->LABEL_MAP.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  struct pointer_map_t *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;
  void **slot;

  new_r = gen_eh_region (old_r->type, outer);
  slot = pointer_map_insert (data->eh_map, (void *)old_r);
  gcc_assert (*slot == NULL);
  *slot = (void *)new_r;

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw = old_r->u.must_not_throw;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      slot = pointer_map_insert (data->eh_map, (void *)old_lp);
      gcc_assert (*slot == NULL);
      *slot = (void *)new_lp;

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

struct pointer_map_t *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = pointer_map_create ();

  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (TEST_BIT (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = lang_hooks.eh_runtime_type (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
      else
        len = VEC_length (uchar, cfun->eh->ehspec_data.other);

      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0-terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
                           TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
    }

  return n->filter;
}
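
/* A worked example (illustrative, not from the original sources): for a
   C++ exception specification throw (A, B), where add_ttypes_entry has
   assigned A the ttypes filter 1 and B the filter 2, and assuming the
   non-ARM uleb128 buffer starts out empty (len == 0), the entry above
   receives filter -(0 + 1) == -1 and the buffer becomes the three bytes
   { 1, 2, 0 }: the two uleb128-encoded type filters followed by the
   terminating zero.  */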

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;
  eh_region r;
  eh_catch c;

  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
  else
    cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb, prev_bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into a newly created landing pad or other
     EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  prev_bb = BLOCK_FOR_INSN (insn)->prev_bb;
  bb = create_basic_block (seq, last, prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    {
      basic_block bb;
      rtx seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

\f
static VEC (int, heap) *sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  htab_t ar_hash;
  int i, disp_index;
  eh_landing_pad lp;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        VEC_replace (int, sjlj_lp_call_site_index, i, call_site);

        disp_index++;
      }

  htab_delete (ar_hash);

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x, last;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                   TYPE_MODE (integer_type_node), 1,
                                   plus_constant (XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label);
      last = get_last_insn ();
      if (JUMP_P (last) && any_condjump_p (last))
        {
          gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
          add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
        }
#else
      expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs),
                                   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  gimple switch_stmt;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  switch_stmt = NULL;
  if (num_dispatch > 1)
    {
      tree disp;

      mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      disp = make_tree (integer_type_node, mem);

      switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
    }

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx seq2, label;

        start_sequence ();

        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            gimple_switch_set_label (switch_stmt, disp_index, case_elt);

            label = label_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      expand_case (switch_stmt);
      expand_builtin_trap ();
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL_RTX);
      sjlj_emit_function_exit ();
    }

  VEC_free (int, heap, sjlj_lp_call_site_index);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB (bb)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}
\f
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
}

/* Splice REGION from the region tree.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
    }

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  if (region->inner)
    {
      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  *pp = region->next_peer;

  VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    {
      if (lp)
        {
          rtx lab = lp->landing_pad;
          if (lab && LABEL_P (lab))
            (*callback) (lab);
        }
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

  ??? This difference probably ought to be avoided.  We could stand
  to record nothrow for arbitrary gimple statements, and so avoid
  some moderately complex lookups in stmt_could_throw_p.  Perhaps
  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
  no-nonlocal-goto property should be recorded elsewhere as a bit
  on the call_insn directly.  Perhaps we should make more use of
  attaching the trees to call_insns (reachable via symbol_ref in
  direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
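
/* Illustrative examples of the encoding above (not from the original
   sources): make_reg_eh_region_note (insn, ECF_NOTHROW, 0) attaches a
   REG_EH_REGION note with value 0 (the insn cannot throw);
   make_reg_eh_region_note (insn, 0, 3) attaches value 3 (the insn
   transfers to landing pad 3); and make_reg_eh_region_note (insn, 0, 0)
   attaches no note at all, i.e. the insn is outside any EH region.  */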

/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
{
  rtx insn, note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
        && insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
{
  rtx insn, note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
        return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
                               eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
  else
    {
      lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}

/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      for (i = 0; i < n; i++)
        if (can_throw_external (XVECEXP (seq, 0, i)))
          return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}

/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      for (i = 0; i < n; i++)
        if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
          return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}

/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const_rtx insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
        return true;
    }
  return false;
}
1823 \f
1824 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1825
1826 static unsigned int
1827 set_nothrow_function_flags (void)
1828 {
1829 rtx insn;
1830
1831 crtl->nothrow = 1;
1832
1833 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1834 something that can throw an exception. We specifically exempt
1835 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1836 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1837 is optimistic. */
1838
1839 crtl->all_throwers_are_sibcalls = 1;
1840
1841 /* If we don't know that this implementation of the function will
1842 actually be used, then we must not set TREE_NOTHROW, since
1843 callers must not assume that this function does not throw. */
1844 if (TREE_NOTHROW (current_function_decl))
1845 return 0;
1846
1847 if (! flag_exceptions)
1848 return 0;
1849
1850 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1851 if (can_throw_external (insn))
1852 {
1853 crtl->nothrow = 0;
1854
1855 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1856 {
1857 crtl->all_throwers_are_sibcalls = 0;
1858 return 0;
1859 }
1860 }
1861
1862 for (insn = crtl->epilogue_delay_list; insn;
1863 insn = XEXP (insn, 1))
1864 if (can_throw_external (insn))
1865 {
1866 crtl->nothrow = 0;
1867
1868 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1869 {
1870 crtl->all_throwers_are_sibcalls = 0;
1871 return 0;
1872 }
1873 }
1874 if (crtl->nothrow
1875 && (cgraph_function_body_availability (cgraph_get_node
1876 (current_function_decl))
1877 >= AVAIL_AVAILABLE))
1878 {
1879 struct cgraph_node *node = cgraph_get_node (current_function_decl);
1880 struct cgraph_edge *e;
1881 for (e = node->callers; e; e = e->next_caller)
1882 e->can_throw_external = false;
1883 cgraph_set_nothrow_flag (node, true);
1884
1885 if (dump_file)
1886 fprintf (dump_file, "Marking function nothrow: %s\n\n",
1887 current_function_name ());
1888 }
1889 return 0;
1890 }
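
/* As a sketch of the intended effect (not a transcript of the pass):
   given

     void f (void) { }
     void g (void) { f (); }

   running this pass on f finds no throwing insns, so, when f's body is
   known to be the one that will be used, every caller edge such as g's
   has can_throw_external cleared and TREE_NOTHROW is set via
   cgraph_set_nothrow_flag; calls to f expanded afterwards are then
   treated as non-throwing. */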
1891
1892 struct rtl_opt_pass pass_set_nothrow_function_flags =
1893 {
1894 {
1895 RTL_PASS,
1896 "nothrow", /* name */
1897 NULL, /* gate */
1898 set_nothrow_function_flags, /* execute */
1899 NULL, /* sub */
1900 NULL, /* next */
1901 0, /* static_pass_number */
1902 TV_NONE, /* tv_id */
1903 0, /* properties_required */
1904 0, /* properties_provided */
1905 0, /* properties_destroyed */
1906 0, /* todo_flags_start */
1907 0 /* todo_flags_finish */
1908 }
1909 };
1910
1911 \f
1912 /* Various hooks for unwind library. */
1913
1914 /* Expand the EH support builtin functions:
1915 __builtin_eh_pointer and __builtin_eh_filter. */
1916
1917 static eh_region
1918 expand_builtin_eh_common (tree region_nr_t)
1919 {
1920 HOST_WIDE_INT region_nr;
1921 eh_region region;
1922
1923 gcc_assert (host_integerp (region_nr_t, 0));
1924 region_nr = tree_low_cst (region_nr_t, 0);
1925
1926 region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
1927
1928 /* ??? We shouldn't have been able to delete an eh region without
1929 deleting all the code that depended on it. */
1930 gcc_assert (region != NULL);
1931
1932 return region;
1933 }
1934
1935 /* Expand to the exc_ptr value from the given eh region. */
1936
1937 rtx
1938 expand_builtin_eh_pointer (tree exp)
1939 {
1940 eh_region region
1941 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1942 if (region->exc_ptr_reg == NULL)
1943 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1944 return region->exc_ptr_reg;
1945 }
1946
1947 /* Expand to the filter value from the given eh region. */
1948
1949 rtx
1950 expand_builtin_eh_filter (tree exp)
1951 {
1952 eh_region region
1953 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1954 if (region->filter_reg == NULL)
1955 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
1956 return region->filter_reg;
1957 }
1958
1959 /* Copy the exc_ptr and filter values from one EH region's registers
1960 to another's. This is used to inline the resx statement. */
1961
1962 rtx
1963 expand_builtin_eh_copy_values (tree exp)
1964 {
1965 eh_region dst
1966 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1967 eh_region src
1968 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
1969 enum machine_mode fmode = targetm.eh_return_filter_mode ();
1970
1971 if (dst->exc_ptr_reg == NULL)
1972 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1973 if (src->exc_ptr_reg == NULL)
1974 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1975
1976 if (dst->filter_reg == NULL)
1977 dst->filter_reg = gen_reg_rtx (fmode);
1978 if (src->filter_reg == NULL)
1979 src->filter_reg = gen_reg_rtx (fmode);
1980
1981 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
1982 emit_move_insn (dst->filter_reg, src->filter_reg);
1983
1984 return const0_rtx;
1985 }
1986
1987 /* Do any necessary initialization to access arbitrary stack frames.
1988 On the SPARC, this means flushing the register windows. */
1989
1990 void
1991 expand_builtin_unwind_init (void)
1992 {
1993 /* Set this so all the registers get saved in our frame; we need to be
1994 able to copy the saved values for any registers from frames we unwind. */
1995 crtl->saves_all_registers = 1;
1996
1997 #ifdef SETUP_FRAME_ADDRESSES
1998 SETUP_FRAME_ADDRESSES ();
1999 #endif
2000 }
2001
2002 /* Map a non-negative number to an eh return data register number; expands
2003 to -1 if no return data register is associated with the input number.
2004 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2005
2006 rtx
2007 expand_builtin_eh_return_data_regno (tree exp)
2008 {
2009 tree which = CALL_EXPR_ARG (exp, 0);
2010 unsigned HOST_WIDE_INT iwhich;
2011
2012 if (TREE_CODE (which) != INTEGER_CST)
2013 {
2014 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2015 return constm1_rtx;
2016 }
2017
2018 iwhich = tree_low_cst (which, 1);
2019 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2020 if (iwhich == INVALID_REGNUM)
2021 return constm1_rtx;
2022
2023 #ifdef DWARF_FRAME_REGNUM
2024 iwhich = DWARF_FRAME_REGNUM (iwhich);
2025 #else
2026 iwhich = DBX_REGISTER_NUMBER (iwhich);
2027 #endif
2028
2029 return GEN_INT (iwhich);
2030 }
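
/* The main consumers of this builtin are the unwinder and the
   language personality routines; the C++ personality, for one,
   installs its results with roughly

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                    (_Unwind_Ptr) &xh->unwindHeader);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                    handler_switch_value);

   (names as in libstdc++'s eh_personality.cc). */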
2031
2032 /* Given a value extracted from the return address register or stack slot,
2033 return the actual address encoded in that value. */
2034
2035 rtx
2036 expand_builtin_extract_return_addr (tree addr_tree)
2037 {
2038 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2039
2040 if (GET_MODE (addr) != Pmode
2041 && GET_MODE (addr) != VOIDmode)
2042 {
2043 #ifdef POINTERS_EXTEND_UNSIGNED
2044 addr = convert_memory_address (Pmode, addr);
2045 #else
2046 addr = convert_to_mode (Pmode, addr, 0);
2047 #endif
2048 }
2049
2050 /* First mask out any unwanted bits. */
2051 #ifdef MASK_RETURN_ADDR
2052 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2053 #endif
2054
2055 /* Then adjust to find the real return address. */
2056 #if defined (RETURN_ADDR_OFFSET)
2057 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2058 #endif
2059
2060 return addr;
2061 }
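
/* Few targets define these macros; as examples, 31-bit S/390 uses
   MASK_RETURN_ADDR to strip the high bit that the machine keeps in
   the return-address register, and SPARC defines RETURN_ADDR_OFFSET
   because its call instruction saves its own address, so the real
   return point lies several bytes past the saved value. */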
2062
2063 /* Given an actual address in addr_tree, do any necessary encoding
2064 and return the value to be stored in the return address register or
2065 stack slot so the epilogue will return to that address. */
2066
2067 rtx
2068 expand_builtin_frob_return_addr (tree addr_tree)
2069 {
2070 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2071
2072 addr = convert_memory_address (Pmode, addr);
2073
2074 #ifdef RETURN_ADDR_OFFSET
2075 addr = force_reg (Pmode, addr);
2076 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2077 #endif
2078
2079 return addr;
2080 }
2081
2082 /* Set up the epilogue with the magic bits we'll need to return to the
2083 exception handler. */
2084
2085 void
2086 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2087 tree handler_tree)
2088 {
2089 rtx tmp;
2090
2091 #ifdef EH_RETURN_STACKADJ_RTX
2092 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2093 VOIDmode, EXPAND_NORMAL);
2094 tmp = convert_memory_address (Pmode, tmp);
2095 if (!crtl->eh.ehr_stackadj)
2096 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2097 else if (tmp != crtl->eh.ehr_stackadj)
2098 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2099 #endif
2100
2101 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2102 VOIDmode, EXPAND_NORMAL);
2103 tmp = convert_memory_address (Pmode, tmp);
2104 if (!crtl->eh.ehr_handler)
2105 crtl->eh.ehr_handler = copy_to_reg (tmp);
2106 else if (tmp != crtl->eh.ehr_handler)
2107 emit_move_insn (crtl->eh.ehr_handler, tmp);
2108
2109 if (!crtl->eh.ehr_label)
2110 crtl->eh.ehr_label = gen_label_rtx ();
2111 emit_jump (crtl->eh.ehr_label);
2112 }
2113
2114 /* Expand __builtin_eh_return. This exit path from the function loads up
2115 the eh return data registers, adjusts the stack, and branches to a
2116 given PC other than the normal return address. */
2117
2118 void
2119 expand_eh_return (void)
2120 {
2121 rtx around_label;
2122
2123 if (! crtl->eh.ehr_label)
2124 return;
2125
2126 crtl->calls_eh_return = 1;
2127
2128 #ifdef EH_RETURN_STACKADJ_RTX
2129 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2130 #endif
2131
2132 around_label = gen_label_rtx ();
2133 emit_jump (around_label);
2134
2135 emit_label (crtl->eh.ehr_label);
2136 clobber_return_register ();
2137
2138 #ifdef EH_RETURN_STACKADJ_RTX
2139 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2140 #endif
2141
2142 #ifdef HAVE_eh_return
2143 if (HAVE_eh_return)
2144 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2145 else
2146 #endif
2147 {
2148 #ifdef EH_RETURN_HANDLER_RTX
2149 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2150 #else
2151 error ("__builtin_eh_return not supported on this target");
2152 #endif
2153 }
2154
2155 emit_label (around_label);
2156 }
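
/* Schematically, the code emitted by the two functions above is

       EH_RETURN_STACKADJ_RTX <- 0     ;; normal-return path
       jump around
     ehr_label:                        ;; expand_builtin_eh_return jumps here
       (return value register clobbered)
       EH_RETURN_STACKADJ_RTX <- ehr_stackadj
       eh_return pattern, or EH_RETURN_HANDLER_RTX <- ehr_handler
     around:
       ...the normal epilogue...

   so a __builtin_eh_return caller reaches the epilogue with the stack
   adjustment and handler address installed, while an ordinary return
   falls through with a zero adjustment. */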
2157
2158 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2159 POINTERS_EXTEND_UNSIGNED and return it. */
2160
2161 rtx
2162 expand_builtin_extend_pointer (tree addr_tree)
2163 {
2164 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2165 int extend;
2166
2167 #ifdef POINTERS_EXTEND_UNSIGNED
2168 extend = POINTERS_EXTEND_UNSIGNED;
2169 #else
2170 /* The previous EH code did an unsigned extend by default, so we do this also
2171 for consistency. */
2172 extend = 1;
2173 #endif
2174
2175 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2176 }
2177 \f
2178 /* In the following functions, we represent entries in the action table
2179 as 1-based indices. Special cases are:
2180
2181 0: null action record, non-null landing pad; implies cleanups
2182 -1: null action record, null landing pad; implies no action
2183 -2: no call-site entry; implies must_not_throw
2184 -3: we have yet to process outer regions
2185
2186 The "next" field of the record has none of these special cases;
2187 there, 0 simply means end of list. */
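
/* For example (an illustrative chain, not anything emitted verbatim):
   for a throwing call inside a try whose single catch was assigned
   filter value 1, collect_one_action_chain below yields an action
   record (filter 1, next 0); a call covered only by cleanup regions
   compresses to 0; and a call inside a must_not_throw region yields
   -2, producing no call-site entry at all. */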
2188
2189 struct action_record
2190 {
2191 int offset;
2192 int filter;
2193 int next;
2194 };
2195
2196 static int
2197 action_record_eq (const void *pentry, const void *pdata)
2198 {
2199 const struct action_record *entry = (const struct action_record *) pentry;
2200 const struct action_record *data = (const struct action_record *) pdata;
2201 return entry->filter == data->filter && entry->next == data->next;
2202 }
2203
2204 static hashval_t
2205 action_record_hash (const void *pentry)
2206 {
2207 const struct action_record *entry = (const struct action_record *) pentry;
2208 return entry->next * 1009 + entry->filter;
2209 }
2210
2211 static int
2212 add_action_record (htab_t ar_hash, int filter, int next)
2213 {
2214 struct action_record **slot, *new_ar, tmp;
2215
2216 tmp.filter = filter;
2217 tmp.next = next;
2218 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2219
2220 if ((new_ar = *slot) == NULL)
2221 {
2222 new_ar = XNEW (struct action_record);
2223 new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
2224 new_ar->filter = filter;
2225 new_ar->next = next;
2226 *slot = new_ar;
2227
2228 /* The filter value goes in untouched. The link to the next
2229 record is a "self-relative" byte offset, or zero to indicate
2230 that there is no next record. So convert the absolute 1-based
2231 indices we've been carrying around into a displacement. */
2232
2233 push_sleb128 (&crtl->eh.action_record_data, filter);
2234 if (next)
2235 next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
2236 push_sleb128 (&crtl->eh.action_record_data, next);
2237 }
2238
2239 return new_ar->offset;
2240 }
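
/* A worked example of the encoding above, under the assumption that
   each filter and link fits in a single sleb128 byte: if record A
   (filter 3, next 0) is created first, it occupies bytes 1-2 of
   action_record_data and has offset 1. A record B (filter 5) chaining
   to A then gets offset 3; after its filter is pushed the vector
   length is 3, so its link is encoded as 1 - (3 + 1) = -3, i.e.
   record A begins 3 bytes before the position just past B's filter
   byte, which is exactly where the unwinder's read pointer stands
   when it applies the displacement. */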
2241
2242 static int
2243 collect_one_action_chain (htab_t ar_hash, eh_region region)
2244 {
2245 int next;
2246
2247 /* If we've reached the top of the region chain, then we have
2248 no actions, and require no landing pad. */
2249 if (region == NULL)
2250 return -1;
2251
2252 switch (region->type)
2253 {
2254 case ERT_CLEANUP:
2255 {
2256 eh_region r;
2257 /* A cleanup adds a zero filter to the beginning of the chain, but
2258 there are special cases to look out for. If there are *only*
2259 cleanups along a path, then it compresses to a zero action.
2260 Further, if there are multiple cleanups along a path, we only
2261 need to represent one of them, as that is enough to trigger
2262 entry to the landing pad at runtime. */
2263 next = collect_one_action_chain (ar_hash, region->outer);
2264 if (next <= 0)
2265 return 0;
2266 for (r = region->outer; r ; r = r->outer)
2267 if (r->type == ERT_CLEANUP)
2268 return next;
2269 return add_action_record (ar_hash, 0, next);
2270 }
2271
2272 case ERT_TRY:
2273 {
2274 eh_catch c;
2275
2276 /* Process the associated catch regions in reverse order.
2277 If there's a catch-all handler, then we don't need to
2278 search outer regions. Use a magic -3 value to record
2279 that we haven't done the outer search. */
2280 next = -3;
2281 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2282 {
2283 if (c->type_list == NULL)
2284 {
2285 /* Retrieve the filter from the head of the filter list
2286 where we have stored it (see assign_filter_values). */
2287 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2288 next = add_action_record (ar_hash, filter, 0);
2289 }
2290 else
2291 {
2292 /* Once the outer search is done, trigger an action record for
2293 each filter we have. */
2294 tree flt_node;
2295
2296 if (next == -3)
2297 {
2298 next = collect_one_action_chain (ar_hash, region->outer);
2299
2300 /* If there is no next action, terminate the chain. */
2301 if (next == -1)
2302 next = 0;
2303 /* If all outer actions are cleanups or must_not_throw,
2304 we'll have no action record for them, since those states
2305 are normally encoded in the call-site record directly.
2306 Add a cleanup action to the chain so they are represented. */
2307 else if (next <= 0)
2308 next = add_action_record (ar_hash, 0, 0);
2309 }
2310
2311 flt_node = c->filter_list;
2312 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2313 {
2314 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2315 next = add_action_record (ar_hash, filter, next);
2316 }
2317 }
2318 }
2319 return next;
2320 }
2321
2322 case ERT_ALLOWED_EXCEPTIONS:
2323 /* An exception specification adds its filter to the
2324 beginning of the chain. */
2325 next = collect_one_action_chain (ar_hash, region->outer);
2326
2327 /* If there is no next action, terminate the chain. */
2328 if (next == -1)
2329 next = 0;
2330 /* If all outer actions are cleanups or must_not_throw,
2331 we'll have no action record for them, since those states
2332 are normally encoded in the call-site record directly.
2333 Add a cleanup action to the chain so they are represented. */
2334 else if (next <= 0)
2335 next = add_action_record (ar_hash, 0, 0);
2336
2337 return add_action_record (ar_hash, region->u.allowed.filter, next);
2338
2339 case ERT_MUST_NOT_THROW:
2340 /* A must-not-throw region with no inner handlers or cleanups
2341 requires no call-site entry. Note that this differs from
2342 the no handler or cleanup case in that we do require an lsda
2343 to be generated. Return a magic -2 value to record this. */
2344 return -2;
2345 }
2346
2347 gcc_unreachable ();
2348 }
2349
2350 static int
2351 add_call_site (rtx landing_pad, int action, int section)
2352 {
2353 call_site_record record;
2354
2355 record = ggc_alloc_call_site_record_d ();
2356 record->landing_pad = landing_pad;
2357 record->action = action;
2358
2359 VEC_safe_push (call_site_record, gc,
2360 crtl->eh.call_site_record[section], record);
2361
2362 return call_site_base + VEC_length (call_site_record,
2363 crtl->eh.call_site_record[section]) - 1;
2364 }
2365
2366 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2367 The new note numbers will not refer to region numbers, but
2368 instead to call site entries. */
2369
2370 static unsigned int
2371 convert_to_eh_region_ranges (void)
2372 {
2373 rtx insn, iter, note;
2374 htab_t ar_hash;
2375 int last_action = -3;
2376 rtx last_action_insn = NULL_RTX;
2377 rtx last_landing_pad = NULL_RTX;
2378 rtx first_no_action_insn = NULL_RTX;
2379 int call_site = 0;
2380 int cur_sec = 0;
2381 rtx section_switch_note = NULL_RTX;
2382 rtx first_no_action_insn_before_switch = NULL_RTX;
2383 rtx last_no_action_insn_before_switch = NULL_RTX;
2384 int saved_call_site_base = call_site_base;
2385
2386 crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
2387
2388 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2389
2390 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2391 if (INSN_P (iter))
2392 {
2393 eh_landing_pad lp;
2394 eh_region region;
2395 bool nothrow;
2396 int this_action;
2397 rtx this_landing_pad;
2398
2399 insn = iter;
2400 if (NONJUMP_INSN_P (insn)
2401 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2402 insn = XVECEXP (PATTERN (insn), 0, 0);
2403
2404 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2405 if (nothrow)
2406 continue;
2407 if (region)
2408 this_action = collect_one_action_chain (ar_hash, region);
2409 else
2410 this_action = -1;
2411
2412 /* The existence of catch handlers or must-not-throw regions
2413 implies that an lsda is needed (even if empty). */
2414 if (this_action != -1)
2415 crtl->uses_eh_lsda = 1;
2416
2417 /* Delay creation of region notes for no-action regions
2418 until we're sure that an lsda will be required. */
2419 else if (last_action == -3)
2420 {
2421 first_no_action_insn = iter;
2422 last_action = -1;
2423 }
2424
2425 if (this_action >= 0)
2426 this_landing_pad = lp->landing_pad;
2427 else
2428 this_landing_pad = NULL_RTX;
2429
2430 /* Differing actions or landing pads imply a change in call-site
2431 info, which means some EH_REGION note should be emitted. */
2432 if (last_action != this_action
2433 || last_landing_pad != this_landing_pad)
2434 {
2435 /* If there is a queued no-action region in the other section
2436 with hot/cold partitioning, emit it now. */
2437 if (first_no_action_insn_before_switch)
2438 {
2439 gcc_assert (this_action != -1
2440 && last_action == (first_no_action_insn
2441 ? -1 : -3));
2442 call_site = add_call_site (NULL_RTX, 0, 0);
2443 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2444 first_no_action_insn_before_switch);
2445 NOTE_EH_HANDLER (note) = call_site;
2446 note = emit_note_after (NOTE_INSN_EH_REGION_END,
2447 last_no_action_insn_before_switch);
2448 NOTE_EH_HANDLER (note) = call_site;
2449 gcc_assert (last_action != -3
2450 || (last_action_insn
2451 == last_no_action_insn_before_switch));
2452 first_no_action_insn_before_switch = NULL_RTX;
2453 last_no_action_insn_before_switch = NULL_RTX;
2454 call_site_base++;
2455 }
2456 /* If we'd not seen a previous action (-3) or the previous
2457 action was must-not-throw (-2), then we do not need an
2458 end note. */
2459 if (last_action >= -1)
2460 {
2461 /* If we delayed the creation of the begin, do it now. */
2462 if (first_no_action_insn)
2463 {
2464 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2465 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2466 first_no_action_insn);
2467 NOTE_EH_HANDLER (note) = call_site;
2468 first_no_action_insn = NULL_RTX;
2469 }
2470
2471 note = emit_note_after (NOTE_INSN_EH_REGION_END,
2472 last_action_insn);
2473 NOTE_EH_HANDLER (note) = call_site;
2474 }
2475
2476 /* If the new action is must-not-throw, then no region notes
2477 are created. */
2478 if (this_action >= -1)
2479 {
2480 call_site = add_call_site (this_landing_pad,
2481 this_action < 0 ? 0 : this_action,
2482 cur_sec);
2483 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2484 NOTE_EH_HANDLER (note) = call_site;
2485 }
2486
2487 last_action = this_action;
2488 last_landing_pad = this_landing_pad;
2489 }
2490 last_action_insn = iter;
2491 }
2492 else if (NOTE_P (iter)
2493 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2494 {
2495 gcc_assert (section_switch_note == NULL_RTX);
2496 gcc_assert (flag_reorder_blocks_and_partition);
2497 section_switch_note = iter;
2498 if (first_no_action_insn)
2499 {
2500 first_no_action_insn_before_switch = first_no_action_insn;
2501 last_no_action_insn_before_switch = last_action_insn;
2502 first_no_action_insn = NULL_RTX;
2503 gcc_assert (last_action == -1);
2504 last_action = -3;
2505 }
2506 /* Force closing of current EH region before section switch and
2507 opening a new one afterwards. */
2508 else if (last_action != -3)
2509 last_landing_pad = pc_rtx;
2510 call_site_base += VEC_length (call_site_record,
2511 crtl->eh.call_site_record[cur_sec]);
2512 cur_sec++;
2513 gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
2514 crtl->eh.call_site_record[cur_sec]
2515 = VEC_alloc (call_site_record, gc, 10);
2516 }
2517
2518 if (last_action >= -1 && ! first_no_action_insn)
2519 {
2520 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
2521 NOTE_EH_HANDLER (note) = call_site;
2522 }
2523
2524 call_site_base = saved_call_site_base;
2525
2526 htab_delete (ar_hash);
2527 return 0;
2528 }
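
/* As an invented illustration of the rewrite performed above,

     (call_insn ... (expr_list:REG_EH_REGION (const_int 2)))

   becomes

     NOTE_INSN_EH_REGION_BEG 0
     (call_insn ...)
     NOTE_INSN_EH_REGION_END 0

   where 0 is the call-site index returned by add_call_site for the
   record now carrying the landing pad and action chain that region 2
   implied; runs of insns with identical action and landing pad share
   one BEG/END pair. */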
2529
2530 static bool
2531 gate_convert_to_eh_region_ranges (void)
2532 {
2533 /* Nothing to do for SJLJ exceptions or if no regions created. */
2534 if (cfun->eh->region_tree == NULL)
2535 return false;
2536 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2537 return false;
2538 return true;
2539 }
2540
2541 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
2542 {
2543 {
2544 RTL_PASS,
2545 "eh_ranges", /* name */
2546 gate_convert_to_eh_region_ranges, /* gate */
2547 convert_to_eh_region_ranges, /* execute */
2548 NULL, /* sub */
2549 NULL, /* next */
2550 0, /* static_pass_number */
2551 TV_NONE, /* tv_id */
2552 0, /* properties_required */
2553 0, /* properties_provided */
2554 0, /* properties_destroyed */
2555 0, /* todo_flags_start */
2556 0 /* todo_flags_finish */
2557 }
2558 };
2559 \f
2560 static void
2561 push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
2562 {
2563 do
2564 {
2565 unsigned char byte = value & 0x7f;
2566 value >>= 7;
2567 if (value)
2568 byte |= 0x80;
2569 VEC_safe_push (uchar, gc, *data_area, byte);
2570 }
2571 while (value);
2572 }
2573
2574 static void
2575 push_sleb128 (VEC (uchar, gc) **data_area, int value)
2576 {
2577 unsigned char byte;
2578 int more;
2579
2580 do
2581 {
2582 byte = value & 0x7f;
2583 value >>= 7;
2584 more = ! ((value == 0 && (byte & 0x40) == 0)
2585 || (value == -1 && (byte & 0x40) != 0));
2586 if (more)
2587 byte |= 0x80;
2588 VEC_safe_push (uchar, gc, *data_area, byte);
2589 }
2590 while (more);
2591 }
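
/* Standard worked examples of the encodings above (these byte
   sequences follow from the loops; they are not GCC-specific):
   unsigned 624485 encodes as 0xe5 0x8e 0x26 -- 0x65 | 0x80, then
   0x0e | 0x80, then 0x26 with the continuation bit clear -- and
   signed -2 encodes as the single byte 0x7e, since after one step
   value == -1 and bit 0x40 of the byte is set. */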
2592
2593 \f
2594 #ifndef HAVE_AS_LEB128
2595 static int
2596 dw2_size_of_call_site_table (int section)
2597 {
2598 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
2599 int size = n * (4 + 4 + 4);
2600 int i;
2601
2602 for (i = 0; i < n; ++i)
2603 {
2604 struct call_site_record_d *cs =
2605 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2606 size += size_of_uleb128 (cs->action);
2607 }
2608
2609 return size;
2610 }
2611
2612 static int
2613 sjlj_size_of_call_site_table (void)
2614 {
2615 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2616 int size = 0;
2617 int i;
2618
2619 for (i = 0; i < n; ++i)
2620 {
2621 struct call_site_record_d *cs =
2622 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2623 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2624 size += size_of_uleb128 (cs->action);
2625 }
2626
2627 return size;
2628 }
2629 #endif
2630
2631 static void
2632 dw2_output_call_site_table (int cs_format, int section)
2633 {
2634 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
2635 int i;
2636 const char *begin;
2637
2638 if (section == 0)
2639 begin = current_function_func_begin_label;
2640 else if (first_function_block_is_cold)
2641 begin = crtl->subsections.hot_section_label;
2642 else
2643 begin = crtl->subsections.cold_section_label;
2644
2645 for (i = 0; i < n; ++i)
2646 {
2647 struct call_site_record_d *cs =
2648 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2649 char reg_start_lab[32];
2650 char reg_end_lab[32];
2651 char landing_pad_lab[32];
2652
2653 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2654 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2655
2656 if (cs->landing_pad)
2657 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2658 CODE_LABEL_NUMBER (cs->landing_pad));
2659
2660 /* ??? Perhaps use insn length scaling if the assembler supports
2661 generic arithmetic. */
2662 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2663 data4 if the function is small enough. */
2664 if (cs_format == DW_EH_PE_uleb128)
2665 {
2666 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2667 "region %d start", i);
2668 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2669 "length");
2670 if (cs->landing_pad)
2671 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2672 "landing pad");
2673 else
2674 dw2_asm_output_data_uleb128 (0, "landing pad");
2675 }
2676 else
2677 {
2678 dw2_asm_output_delta (4, reg_start_lab, begin,
2679 "region %d start", i);
2680 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2681 if (cs->landing_pad)
2682 dw2_asm_output_delta (4, landing_pad_lab, begin,
2683 "landing pad");
2684 else
2685 dw2_asm_output_data (4, 0, "landing pad");
2686 }
2687 dw2_asm_output_data_uleb128 (cs->action, "action");
2688 }
2689
2690 call_site_base += n;
2691 }
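
/* With HAVE_AS_LEB128 (cs_format DW_EH_PE_uleb128), one record from
   the loop above comes out roughly as

       .uleb128 .LEHB0-.LFB0    # region 0 start
       .uleb128 .LEHE0-.LEHB0   # length
       .uleb128 .L13-.LFB0      # landing pad
       .uleb128 0x1             # action

   where .LFB0 stands in for current_function_func_begin_label and
   .L13 for the landing pad's code label (label names invented for
   the example). */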
2692
2693 static void
2694 sjlj_output_call_site_table (void)
2695 {
2696 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2697 int i;
2698
2699 for (i = 0; i < n; ++i)
2700 {
2701 struct call_site_record_d *cs =
2702 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2703
2704 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2705 "region %d landing pad", i);
2706 dw2_asm_output_data_uleb128 (cs->action, "action");
2707 }
2708
2709 call_site_base += n;
2710 }
2711
2712 /* Switch to the section that should be used for exception tables. */
2713
2714 static void
2715 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2716 {
2717 section *s;
2718
2719 if (exception_section)
2720 s = exception_section;
2721 else
2722 {
2723 /* Compute the section and cache it into exception_section,
2724 unless it depends on the function name. */
2725 if (targetm_common.have_named_sections)
2726 {
2727 int flags;
2728
2729 if (EH_TABLES_CAN_BE_READ_ONLY)
2730 {
2731 int tt_format =
2732 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2733 flags = ((! flag_pic
2734 || ((tt_format & 0x70) != DW_EH_PE_absptr
2735 && (tt_format & 0x70) != DW_EH_PE_aligned))
2736 ? 0 : SECTION_WRITE);
2737 }
2738 else
2739 flags = SECTION_WRITE;
2740
2741 #ifdef HAVE_LD_EH_GC_SECTIONS
2742 if (flag_function_sections)
2743 {
2744 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2745 sprintf (section_name, ".gcc_except_table.%s", fnname);
2746 s = get_section (section_name, flags, NULL);
2747 free (section_name);
2748 }
2749 else
2750 #endif
2751 exception_section
2752 = s = get_section (".gcc_except_table", flags, NULL);
2753 }
2754 else
2755 exception_section
2756 = s = flag_pic ? data_section : readonly_data_section;
2757 }
2758
2759 switch_to_section (s);
2760 }
2761
2762
2763 /* Output a reference from an exception table to the type_info object TYPE.
2764 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2765 the value. */
2766
2767 static void
2768 output_ttype (tree type, int tt_format, int tt_format_size)
2769 {
2770 rtx value;
2771 bool is_public = true;
2772
2773 if (type == NULL_TREE)
2774 value = const0_rtx;
2775 else
2776 {
2777 struct varpool_node *node;
2778
2779 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2780 runtime types so TYPE should already be a runtime type
2781 reference. When pass_ipa_free_lang_data is made a default
2782 pass, we can then remove the call to lookup_type_for_runtime
2783 below. */
2784 if (TYPE_P (type))
2785 type = lookup_type_for_runtime (type);
2786
2787 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2788
2789 /* Let cgraph know that the rtti decl is used. Not all of the
2790 paths below go through assemble_integer, which would take
2791 care of this for us. */
2792 STRIP_NOPS (type);
2793 if (TREE_CODE (type) == ADDR_EXPR)
2794 {
2795 type = TREE_OPERAND (type, 0);
2796 if (TREE_CODE (type) == VAR_DECL)
2797 {
2798 node = varpool_node (type);
2799 if (node)
2800 varpool_mark_needed_node (node);
2801 is_public = TREE_PUBLIC (type);
2802 }
2803 }
2804 else
2805 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2806 }
2807
2808 /* Allow the target to override the type table entry format. */
2809 if (targetm.asm_out.ttype (value))
2810 return;
2811
2812 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2813 assemble_integer (value, tt_format_size,
2814 tt_format_size * BITS_PER_UNIT, 1);
2815 else
2816 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2817 }
2818
2819 static void
2820 output_one_function_exception_table (int section)
2821 {
2822 int tt_format, cs_format, lp_format, i;
2823 #ifdef HAVE_AS_LEB128
2824 char ttype_label[32];
2825 char cs_after_size_label[32];
2826 char cs_end_label[32];
2827 #else
2828 int call_site_len;
2829 #endif
2830 int have_tt_data;
2831 int tt_format_size = 0;
2832
2833 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
2834 || (targetm.arm_eabi_unwinder
2835 ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
2836 : VEC_length (uchar, cfun->eh->ehspec_data.other)));
2837
2838 /* Indicate the format of the @TType entries. */
2839 if (! have_tt_data)
2840 tt_format = DW_EH_PE_omit;
2841 else
2842 {
2843 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2844 #ifdef HAVE_AS_LEB128
2845 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2846 section ? "LLSDATTC" : "LLSDATT",
2847 current_function_funcdef_no);
2848 #endif
2849 tt_format_size = size_of_encoded_value (tt_format);
2850
2851 assemble_align (tt_format_size * BITS_PER_UNIT);
2852 }
2853
2854 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2855 current_function_funcdef_no);
2856
2857 /* The LSDA header. */
2858
2859 /* Indicate the format of the landing pad start pointer. An omitted
2860 field implies @LPStart == @Start. */
2861 /* Currently we always put @LPStart == @Start. This field would
2862 be most useful in moving the landing pads completely out of
2863 line to another section, but it could also be used to minimize
2864 the size of uleb128 landing pad offsets. */
2865 lp_format = DW_EH_PE_omit;
2866 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2867 eh_data_format_name (lp_format));
2868
2869 /* @LPStart pointer would go here. */
2870
2871 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2872 eh_data_format_name (tt_format));
2873
2874 #ifndef HAVE_AS_LEB128
2875 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2876 call_site_len = sjlj_size_of_call_site_table ();
2877 else
2878 call_site_len = dw2_size_of_call_site_table (section);
2879 #endif
2880
2881 /* A self-relative uleb128 displacement to the end (base) of the @TType data. */
2882 if (have_tt_data)
2883 {
2884 #ifdef HAVE_AS_LEB128
2885 char ttype_after_disp_label[32];
2886 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2887 section ? "LLSDATTDC" : "LLSDATTD",
2888 current_function_funcdef_no);
2889 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
2890 "@TType base offset");
2891 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
2892 #else
2893 /* Ugh. Alignment complicates the size computation; see the example below. */
2894 unsigned int before_disp, after_disp, last_disp, disp;
2895
2896 before_disp = 1 + 1;
2897 after_disp = (1 + size_of_uleb128 (call_site_len)
2898 + call_site_len
2899 + VEC_length (uchar, crtl->eh.action_record_data)
2900 + (VEC_length (tree, cfun->eh->ttype_data)
2901 * tt_format_size));
2902
2903 disp = after_disp;
2904 do
2905 {
2906 unsigned int disp_size, pad;
2907
2908 last_disp = disp;
2909 disp_size = size_of_uleb128 (disp);
2910 pad = before_disp + disp_size + after_disp;
2911 if (pad % tt_format_size)
2912 pad = tt_format_size - (pad % tt_format_size);
2913 else
2914 pad = 0;
2915 disp = after_disp + pad;
2916 }
2917 while (disp != last_disp);
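
/* E.g. with tt_format_size == 4, before_disp == 2 and after_disp == 125,
   a one-byte uleb128 gives 2 + 1 + 125 == 128, already 0 mod 4, so disp
   settles at 125 on the first pass. A slightly larger after_disp can
   push the displacement across a uleb128 size boundary and change the
   padding, which is why we iterate to a fixed point. */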
2918
2919 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
2920 #endif
2921 }
2922
2923 /* Indicate the format of the call-site offsets. */
2924 #ifdef HAVE_AS_LEB128
2925 cs_format = DW_EH_PE_uleb128;
2926 #else
2927 cs_format = DW_EH_PE_udata4;
2928 #endif
2929 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
2930 eh_data_format_name (cs_format));
2931
2932 #ifdef HAVE_AS_LEB128
2933 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
2934 section ? "LLSDACSBC" : "LLSDACSB",
2935 current_function_funcdef_no);
2936 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
2937 section ? "LLSDACSEC" : "LLSDACSE",
2938 current_function_funcdef_no);
2939 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
2940 "Call-site table length");
2941 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
2942 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2943 sjlj_output_call_site_table ();
2944 else
2945 dw2_output_call_site_table (cs_format, section);
2946 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
2947 #else
2948 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
2949 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2950 sjlj_output_call_site_table ();
2951 else
2952 dw2_output_call_site_table (cs_format, section);
2953 #endif
2954
2955 /* ??? Decode and interpret the data for flag_debug_asm. */
2956 {
2957 uchar uc;
2958 FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
2959 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
2960 }
2961
2962 if (have_tt_data)
2963 assemble_align (tt_format_size * BITS_PER_UNIT);
2964
2965 i = VEC_length (tree, cfun->eh->ttype_data);
2966 while (i-- > 0)
2967 {
2968 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
2969 output_ttype (type, tt_format, tt_format_size);
2970 }
2971
2972 #ifdef HAVE_AS_LEB128
2973 if (have_tt_data)
2974 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
2975 #endif
2976
2977 /* ??? Decode and interpret the data for flag_debug_asm. */
2978 if (targetm.arm_eabi_unwinder)
2979 {
2980 tree type;
2981 for (i = 0;
2982 VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
2983 output_ttype (type, tt_format, tt_format_size);
2984 }
2985 else
2986 {
2987 uchar uc;
2988 for (i = 0;
2989 VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
2990 dw2_asm_output_data (1, uc,
2991 i ? NULL : "Exception specification table");
2992 }
2993 }
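
/* Taken together, the layout emitted above for one LSDA is: the
   @LPStart format byte (always DW_EH_PE_omit here), the @TType format
   byte, a uleb128 @TType base offset when type data is present, the
   call-site format byte, the uleb128 call-site table length followed
   by the call-site table itself, the action record table, and then
   the aligned type table and the exception specification table. */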
2994
2995 void
2996 output_function_exception_table (const char *fnname)
2997 {
2998 rtx personality = get_personality_function (current_function_decl);
2999
3000 /* Not all functions need anything. */
3001 if (! crtl->uses_eh_lsda)
3002 return;
3003
3004 if (personality)
3005 {
3006 assemble_external_libcall (personality);
3007
3008 if (targetm.asm_out.emit_except_personality)
3009 targetm.asm_out.emit_except_personality (personality);
3010 }
3011
3012 switch_to_exception_section (fnname);
3013
3014 /* If the target wants a label to begin the table, emit it here. */
3015 targetm.asm_out.emit_except_table_label (asm_out_file);
3016
3017 output_one_function_exception_table (0);
3018 if (crtl->eh.call_site_record[1] != NULL)
3019 output_one_function_exception_table (1);
3020
3021 switch_to_section (current_function_section ());
3022 }
3023
3024 void
3025 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3026 {
3027 fun->eh->throw_stmt_table = table;
3028 }
3029
3030 htab_t
3031 get_eh_throw_stmt_table (struct function *fun)
3032 {
3033 return fun->eh->throw_stmt_table;
3034 }
3035 \f
3036 /* Determine if the function needs an EH personality function. */
3037
3038 enum eh_personality_kind
3039 function_needs_eh_personality (struct function *fn)
3040 {
3041 enum eh_personality_kind kind = eh_personality_none;
3042 eh_region i;
3043
3044 FOR_ALL_EH_REGION_FN (i, fn)
3045 {
3046 switch (i->type)
3047 {
3048 case ERT_CLEANUP:
3049 /* Can do with any personality including the generic C one. */
3050 kind = eh_personality_any;
3051 break;
3052
3053 case ERT_TRY:
3054 case ERT_ALLOWED_EXCEPTIONS:
3055 /* Always needs an EH personality function. The generic C
3056 personality doesn't handle these even for empty type lists. */
3057 return eh_personality_lang;
3058
3059 case ERT_MUST_NOT_THROW:
3060 /* Always needs an EH personality function. The language may specify
3061 which abort routine must be used, e.g. std::terminate. */
3062 return eh_personality_lang;
3063 }
3064 }
3065
3066 return kind;
3067 }
3068 \f
3069 /* Dump EH information to OUT. */
3070
3071 void
3072 dump_eh_tree (FILE * out, struct function *fun)
3073 {
3074 eh_region i;
3075 int depth = 0;
3076 static const char *const type_name[] = {
3077 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3078 };
3079
3080 i = fun->eh->region_tree;
3081 if (!i)
3082 return;
3083
3084 fprintf (out, "Eh tree:\n");
3085 while (1)
3086 {
3087 fprintf (out, " %*s %i %s", depth * 2, "",
3088 i->index, type_name[(int) i->type]);
3089
3090 if (i->landing_pads)
3091 {
3092 eh_landing_pad lp;
3093
3094 fprintf (out, " land:");
3095 if (current_ir_type () == IR_GIMPLE)
3096 {
3097 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3098 {
3099 fprintf (out, "{%i,", lp->index);
3100 print_generic_expr (out, lp->post_landing_pad, 0);
3101 fputc ('}', out);
3102 if (lp->next_lp)
3103 fputc (',', out);
3104 }
3105 }
3106 else
3107 {
3108 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3109 {
3110 fprintf (out, "{%i,", lp->index);
3111 if (lp->landing_pad)
3112 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3113 NOTE_P (lp->landing_pad) ? "(del)" : "");
3114 else
3115 fprintf (out, "(nil),");
3116 if (lp->post_landing_pad)
3117 {
3118 rtx lab = label_rtx (lp->post_landing_pad);
3119 fprintf (out, "%i%s}", INSN_UID (lab),
3120 NOTE_P (lab) ? "(del)" : "");
3121 }
3122 else
3123 fprintf (out, "(nil)}");
3124 if (lp->next_lp)
3125 fputc (',', out);
3126 }
3127 }
3128 }
3129
3130 switch (i->type)
3131 {
3132 case ERT_CLEANUP:
3133 case ERT_MUST_NOT_THROW:
3134 break;
3135
3136 case ERT_TRY:
3137 {
3138 eh_catch c;
3139 fprintf (out, " catch:");
3140 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3141 {
3142 fputc ('{', out);
3143 if (c->label)
3144 {
3145 fprintf (out, "lab:");
3146 print_generic_expr (out, c->label, 0);
3147 fputc (';', out);
3148 }
3149 print_generic_expr (out, c->type_list, 0);
3150 fputc ('}', out);
3151 if (c->next_catch)
3152 fputc (',', out);
3153 }
3154 }
3155 break;
3156
3157 case ERT_ALLOWED_EXCEPTIONS:
3158 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3159 print_generic_expr (out, i->u.allowed.type_list, 0);
3160 break;
3161 }
3162 fputc ('\n', out);
3163
3164 /* If there are sub-regions, process them. */
3165 if (i->inner)
3166 i = i->inner, depth++;
3167 /* If there are peers, process them. */
3168 else if (i->next_peer)
3169 i = i->next_peer;
3170 /* Otherwise, step back up the tree to the next peer. */
3171 else
3172 {
3173 do
3174 {
3175 i = i->outer;
3176 depth--;
3177 if (i == NULL)
3178 return;
3179 }
3180 while (i->next_peer == NULL);
3181 i = i->next_peer;
3182 }
3183 }
3184 }
3185
3186 /* Dump the EH tree for FN on stderr. */
3187
3188 DEBUG_FUNCTION void
3189 debug_eh_tree (struct function *fn)
3190 {
3191 dump_eh_tree (stderr, fn);
3192 }
3193
3194 /* Verify invariants on EH data structures. */
3195
3196 DEBUG_FUNCTION void
3197 verify_eh_tree (struct function *fun)
3198 {
3199 eh_region r, outer;
3200 int nvisited_lp, nvisited_r;
3201 int count_lp, count_r, depth, i;
3202 eh_landing_pad lp;
3203 bool err = false;
3204
3205 if (!fun->eh->region_tree)
3206 return;
3207
3208 count_r = 0;
3209 for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
3210 if (r)
3211 {
3212 if (r->index == i)
3213 count_r++;
3214 else
3215 {
3216 error ("region_array is corrupted for region %i", r->index);
3217 err = true;
3218 }
3219 }
3220
3221 count_lp = 0;
3222 for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
3223 if (lp)
3224 {
3225 if (lp->index == i)
3226 count_lp++;
3227 else
3228 {
3229 error ("lp_array is corrupted for lp %i", lp->index);
3230 err = true;
3231 }
3232 }
3233
3234 depth = nvisited_lp = nvisited_r = 0;
3235 outer = NULL;
3236 r = fun->eh->region_tree;
3237 while (1)
3238 {
3239 if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
3240 {
3241 error ("region_array is corrupted for region %i", r->index);
3242 err = true;
3243 }
3244 if (r->outer != outer)
3245 {
3246 error ("outer block of region %i is wrong", r->index);
3247 err = true;
3248 }
3249 if (depth < 0)
3250 {
3251 error ("negative nesting depth of region %i", r->index);
3252 err = true;
3253 }
3254 nvisited_r++;
3255
3256 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3257 {
3258 if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
3259 {
3260 error ("lp_array is corrupted for lp %i", lp->index);
3261 err = true;
3262 }
3263 if (lp->region != r)
3264 {
3265 error ("region of lp %i is wrong", lp->index);
3266 err = true;
3267 }
3268 nvisited_lp++;
3269 }
3270
3271 if (r->inner)
3272 outer = r, r = r->inner, depth++;
3273 else if (r->next_peer)
3274 r = r->next_peer;
3275 else
3276 {
3277 do
3278 {
3279 r = r->outer;
3280 if (r == NULL)
3281 goto region_done;
3282 depth--;
3283 outer = r->outer;
3284 }
3285 while (r->next_peer == NULL);
3286 r = r->next_peer;
3287 }
3288 }
3289 region_done:
3290 if (depth != 0)
3291 {
3292 error ("tree list ends on depth %i", depth);
3293 err = true;
3294 }
3295 if (count_r != nvisited_r)
3296 {
3297 error ("region_array does not match region_tree");
3298 err = true;
3299 }
3300 if (count_lp != nvisited_lp)
3301 {
3302 error ("lp_array does not match region_tree");
3303 err = true;
3304 }
3305
3306 if (err)
3307 {
3308 dump_eh_tree (stderr, fun);
3309 internal_error ("verify_eh_tree failed");
3310 }
3311 }
3312 \f
3313 #include "gt-except.h"