/* gcc/except.c  */
1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 /* An exception is an event that can be "thrown" from within a
25 function. This event can then be "caught" by the callers of
26 the function.
27
28 The representation of exceptions changes several times during
29 the compilation process:
30
31 In the beginning, in the front end, we have the GENERIC trees
32 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
33 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
34
35 During initial gimplification (gimplify.c) these are lowered
36 to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
37 The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
38 into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
39 conversion.
40
41 During pass_lower_eh (tree-eh.c) we record the nested structure
42 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
43 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
44 regions at this time. We can then flatten the statements within
45 the TRY nodes to straight-line code. Statements that had been within
46 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
47 so that we may remember what action is supposed to be taken if
48 a given statement does throw. During this lowering process,
49 we create an EH_LANDING_PAD node for each EH_REGION that has
50 some code within the function that needs to be executed if a
51 throw does happen. We also create RESX statements that are
52 used to transfer control from an inner EH_REGION to an outer
53 EH_REGION. We also create EH_DISPATCH statements as placeholders
54 for a runtime type comparison that should be made in order to
55 select the action to perform among different CATCH and EH_FILTER
56 regions.
57
58 During pass_lower_eh_dispatch (tree-eh.c), which is run after
59 all inlining is complete, we are able to run assign_filter_values,
60 which allows us to map the set of types manipulated by all of the
61 CATCH and EH_FILTER regions to a set of integers. This set of integers
62 will be how the exception runtime communicates with the code generated
63 within the function. We then expand the GIMPLE_EH_DISPATCH statements
64 to a switch or conditional branches that use the argument provided by
65 the runtime (__builtin_eh_filter) and the set of integers we computed
66 in assign_filter_values.
67
68 During pass_lower_resx (tree-eh.c), which is run near the end
69 of optimization, we expand RESX statements. If the eh region
70 that is outer to the RESX statement is a MUST_NOT_THROW, then
71 the RESX expands to some form of abort statement. If the eh
72 region that is outer to the RESX statement is within the current
73 function, then the RESX expands to a bookkeeping call
74 (__builtin_eh_copy_values) and a goto. Otherwise, the next
75 handler for the exception must be within a function somewhere
76 up the call chain, so we call back into the exception runtime
77 (__builtin_unwind_resume).
78
79 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
80 that create an rtl to eh_region mapping that corresponds to the
81 gimple to eh_region mapping that had been recorded in the
82 THROW_STMT_TABLE.
83
84 Then, via finish_eh_generation, we generate the real landing pads
85 to which the runtime will actually transfer control. These new
86 landing pads perform whatever bookkeeping is needed by the target
87 backend in order to resume execution within the current function.
88 Each of these new landing pads falls through into the post_landing_pad
89 label which had been used within the CFG up to this point. All
90 exception edges within the CFG are redirected to the new landing pads.
91 If the target uses setjmp to implement exceptions, the various extra
92 calls into the runtime to register and unregister the current stack
93 frame are emitted at this time.
94
95 During pass_convert_to_eh_region_ranges (except.c), we transform
96 the REG_EH_REGION notes attached to individual insns into
97 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
98 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
99 same associated action within the exception region tree, meaning
100 that (1) the exception is caught by the same landing pad within the
101 current function, (2) the exception is blocked by the runtime with
102 a MUST_NOT_THROW region, or (3) the exception is not handled at all
103 within the current function.
104
105 Finally, during assembly generation, we call
106 output_function_exception_table (except.c) to emit the tables with
107 which the exception runtime can determine if a given stack frame
108 handles a given exception, and if so what filter value to provide
109 to the function when the non-local control transfer is effected.
110 If the target uses dwarf2 unwinding to implement exceptions, then
111 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
112
113
114 #include "config.h"
115 #include "system.h"
116 #include "coretypes.h"
117 #include "tm.h"
118 #include "rtl.h"
119 #include "tree.h"
120 #include "flags.h"
121 #include "function.h"
122 #include "expr.h"
123 #include "libfuncs.h"
124 #include "insn-config.h"
125 #include "except.h"
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "output.h"
129 #include "dwarf2asm.h"
130 #include "dwarf2out.h"
131 #include "dwarf2.h"
132 #include "toplev.h"
133 #include "hashtab.h"
134 #include "intl.h"
135 #include "ggc.h"
136 #include "tm_p.h"
137 #include "target.h"
138 #include "common/common-target.h"
139 #include "langhooks.h"
140 #include "cgraph.h"
141 #include "diagnostic.h"
142 #include "tree-pretty-print.h"
143 #include "tree-pass.h"
144 #include "timevar.h"
145 #include "tree-flow.h"
146 #include "cfgloop.h"
147
148 /* Provide defaults for stuff that may not be defined when using
149 sjlj exceptions. */
150 #ifndef EH_RETURN_DATA_REGNO
151 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
152 #endif
153
154 static GTY(()) int call_site_base;
155 static GTY ((param_is (union tree_node)))
156 htab_t type_to_runtime_map;
157
158 /* Describe the SjLj_Function_Context structure. */
159 static GTY(()) tree sjlj_fc_type_node;
160 static int sjlj_fc_call_site_ofs;
161 static int sjlj_fc_data_ofs;
162 static int sjlj_fc_personality_ofs;
163 static int sjlj_fc_lsda_ofs;
164 static int sjlj_fc_jbuf_ofs;
165 \f
166
167 struct GTY(()) call_site_record_d
168 {
169 rtx landing_pad;
170 int action;
171 };
172 \f
173 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
174 eh_landing_pad *);
175
176 static int t2r_eq (const void *, const void *);
177 static hashval_t t2r_hash (const void *);
178
179 static int ttypes_filter_eq (const void *, const void *);
180 static hashval_t ttypes_filter_hash (const void *);
181 static int ehspec_filter_eq (const void *, const void *);
182 static hashval_t ehspec_filter_hash (const void *);
183 static int add_ttypes_entry (htab_t, tree);
184 static int add_ehspec_entry (htab_t, htab_t, tree);
185 static void dw2_build_landing_pads (void);
186
187 static int action_record_eq (const void *, const void *);
188 static hashval_t action_record_hash (const void *);
189 static int add_action_record (htab_t, int, int);
190 static int collect_one_action_chain (htab_t, eh_region);
191 static int add_call_site (rtx, int, int);
192
193 static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
194 static void push_sleb128 (VEC (uchar, gc) **, int);
195 #ifndef HAVE_AS_LEB128
196 static int dw2_size_of_call_site_table (int);
197 static int sjlj_size_of_call_site_table (void);
198 #endif
199 static void dw2_output_call_site_table (int, int);
200 static void sjlj_output_call_site_table (void);
201
202 \f
203 void
204 init_eh (void)
205 {
206 if (! flag_exceptions)
207 return;
208
209 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
210
211 /* Create the SjLj_Function_Context structure. This should match
212 the definition in unwind-sjlj.c. */
213 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
214 {
215 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
216
217 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
218
219 f_prev = build_decl (BUILTINS_LOCATION,
220 FIELD_DECL, get_identifier ("__prev"),
221 build_pointer_type (sjlj_fc_type_node));
222 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
223
224 f_cs = build_decl (BUILTINS_LOCATION,
225 FIELD_DECL, get_identifier ("__call_site"),
226 integer_type_node);
227 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
228
229 tmp = build_index_type (size_int (4 - 1));
230 tmp = build_array_type (lang_hooks.types.type_for_mode
231 (targetm.unwind_word_mode (), 1),
232 tmp);
233 f_data = build_decl (BUILTINS_LOCATION,
234 FIELD_DECL, get_identifier ("__data"), tmp);
235 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
236
237 f_per = build_decl (BUILTINS_LOCATION,
238 FIELD_DECL, get_identifier ("__personality"),
239 ptr_type_node);
240 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
241
242 f_lsda = build_decl (BUILTINS_LOCATION,
243 FIELD_DECL, get_identifier ("__lsda"),
244 ptr_type_node);
245 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
246
247 #ifdef DONT_USE_BUILTIN_SETJMP
248 #ifdef JMP_BUF_SIZE
249 tmp = size_int (JMP_BUF_SIZE - 1);
250 #else
251 /* Should be large enough for most systems, if it is not,
252 JMP_BUF_SIZE should be defined with the proper value. It will
253 also tend to be larger than necessary for most systems, a more
254 optimal port will define JMP_BUF_SIZE. */
255 tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
256 #endif
257 #else
258 /* builtin_setjmp takes a pointer to 5 words. */
259 tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
260 #endif
261 tmp = build_index_type (tmp);
262 tmp = build_array_type (ptr_type_node, tmp);
263 f_jbuf = build_decl (BUILTINS_LOCATION,
264 FIELD_DECL, get_identifier ("__jbuf"), tmp);
265 #ifdef DONT_USE_BUILTIN_SETJMP
266 /* We don't know what the alignment requirements of the
267 runtime's jmp_buf has. Overestimate. */
268 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
269 DECL_USER_ALIGN (f_jbuf) = 1;
270 #endif
271 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
272
273 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
274 TREE_CHAIN (f_prev) = f_cs;
275 TREE_CHAIN (f_cs) = f_data;
276 TREE_CHAIN (f_data) = f_per;
277 TREE_CHAIN (f_per) = f_lsda;
278 TREE_CHAIN (f_lsda) = f_jbuf;
279
280 layout_type (sjlj_fc_type_node);
281
282 /* Cache the interesting field offsets so that we have
283 easy access from rtl. */
284 sjlj_fc_call_site_ofs
285 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
286 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
287 sjlj_fc_data_ofs
288 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
289 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
290 sjlj_fc_personality_ofs
291 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
292 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
293 sjlj_fc_lsda_ofs
294 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
295 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
296 sjlj_fc_jbuf_ofs
297 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
298 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
299 }
300 }
301
302 void
303 init_eh_for_function (void)
304 {
305 cfun->eh = ggc_alloc_cleared_eh_status ();
306
307 /* Make sure zero'th entries are used. */
308 VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
309 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
310 }
311 \f
312 /* Routines to generate the exception tree somewhat directly.
313 These are used from tree-eh.c when processing exception related
314 nodes during tree optimization. */
315
316 static eh_region
317 gen_eh_region (enum eh_region_type type, eh_region outer)
318 {
319 eh_region new_eh;
320
321 /* Insert a new blank region as a leaf in the tree. */
322 new_eh = ggc_alloc_cleared_eh_region_d ();
323 new_eh->type = type;
324 new_eh->outer = outer;
325 if (outer)
326 {
327 new_eh->next_peer = outer->inner;
328 outer->inner = new_eh;
329 }
330 else
331 {
332 new_eh->next_peer = cfun->eh->region_tree;
333 cfun->eh->region_tree = new_eh;
334 }
335
336 new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
337 VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
338
339 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
340 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
341 new_eh->use_cxa_end_cleanup = true;
342
343 return new_eh;
344 }
345
346 eh_region
347 gen_eh_region_cleanup (eh_region outer)
348 {
349 return gen_eh_region (ERT_CLEANUP, outer);
350 }
351
352 eh_region
353 gen_eh_region_try (eh_region outer)
354 {
355 return gen_eh_region (ERT_TRY, outer);
356 }
357
358 eh_catch
359 gen_eh_region_catch (eh_region t, tree type_or_list)
360 {
361 eh_catch c, l;
362 tree type_list, type_node;
363
364 gcc_assert (t->type == ERT_TRY);
365
366 /* Ensure to always end up with a type list to normalize further
367 processing, then register each type against the runtime types map. */
368 type_list = type_or_list;
369 if (type_or_list)
370 {
371 if (TREE_CODE (type_or_list) != TREE_LIST)
372 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
373
374 type_node = type_list;
375 for (; type_node; type_node = TREE_CHAIN (type_node))
376 add_type_for_runtime (TREE_VALUE (type_node));
377 }
378
379 c = ggc_alloc_cleared_eh_catch_d ();
380 c->type_list = type_list;
381 l = t->u.eh_try.last_catch;
382 c->prev_catch = l;
383 if (l)
384 l->next_catch = c;
385 else
386 t->u.eh_try.first_catch = c;
387 t->u.eh_try.last_catch = c;
388
389 return c;
390 }
391
392 eh_region
393 gen_eh_region_allowed (eh_region outer, tree allowed)
394 {
395 eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
396 region->u.allowed.type_list = allowed;
397
398 for (; allowed ; allowed = TREE_CHAIN (allowed))
399 add_type_for_runtime (TREE_VALUE (allowed));
400
401 return region;
402 }
403
404 eh_region
405 gen_eh_region_must_not_throw (eh_region outer)
406 {
407 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
408 }
409
410 eh_landing_pad
411 gen_eh_landing_pad (eh_region region)
412 {
413 eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
414
415 lp->next_lp = region->landing_pads;
416 lp->region = region;
417 lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
418 region->landing_pads = lp;
419
420 VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
421
422 return lp;
423 }
424
425 eh_region
426 get_eh_region_from_number_fn (struct function *ifun, int i)
427 {
428 return VEC_index (eh_region, ifun->eh->region_array, i);
429 }
430
431 eh_region
432 get_eh_region_from_number (int i)
433 {
434 return get_eh_region_from_number_fn (cfun, i);
435 }
436
437 eh_landing_pad
438 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
439 {
440 return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
441 }
442
443 eh_landing_pad
444 get_eh_landing_pad_from_number (int i)
445 {
446 return get_eh_landing_pad_from_number_fn (cfun, i);
447 }
448
449 eh_region
450 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
451 {
452 if (i < 0)
453 return VEC_index (eh_region, ifun->eh->region_array, -i);
454 else if (i == 0)
455 return NULL;
456 else
457 {
458 eh_landing_pad lp;
459 lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
460 return lp->region;
461 }
462 }
463
464 eh_region
465 get_eh_region_from_lp_number (int i)
466 {
467 return get_eh_region_from_lp_number_fn (cfun, i);
468 }
469 \f
470 /* Returns true if the current function has exception handling regions. */
471
472 bool
473 current_function_has_exception_handlers (void)
474 {
475 return cfun->eh->region_tree != NULL;
476 }
477 \f
478 /* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
479 Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
480
481 struct duplicate_eh_regions_data
482 {
483 duplicate_eh_regions_map label_map;
484 void *label_map_data;
485 struct pointer_map_t *eh_map;
486 };
487
488 static void
489 duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
490 eh_region old_r, eh_region outer)
491 {
492 eh_landing_pad old_lp, new_lp;
493 eh_region new_r;
494 void **slot;
495
496 new_r = gen_eh_region (old_r->type, outer);
497 slot = pointer_map_insert (data->eh_map, (void *)old_r);
498 gcc_assert (*slot == NULL);
499 *slot = (void *)new_r;
500
501 switch (old_r->type)
502 {
503 case ERT_CLEANUP:
504 break;
505
506 case ERT_TRY:
507 {
508 eh_catch oc, nc;
509 for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
510 {
511 /* We should be doing all our region duplication before and
512 during inlining, which is before filter lists are created. */
513 gcc_assert (oc->filter_list == NULL);
514 nc = gen_eh_region_catch (new_r, oc->type_list);
515 nc->label = data->label_map (oc->label, data->label_map_data);
516 }
517 }
518 break;
519
520 case ERT_ALLOWED_EXCEPTIONS:
521 new_r->u.allowed.type_list = old_r->u.allowed.type_list;
522 if (old_r->u.allowed.label)
523 new_r->u.allowed.label
524 = data->label_map (old_r->u.allowed.label, data->label_map_data);
525 else
526 new_r->u.allowed.label = NULL_TREE;
527 break;
528
529 case ERT_MUST_NOT_THROW:
530 new_r->u.must_not_throw = old_r->u.must_not_throw;
531 break;
532 }
533
534 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
535 {
536 /* Don't bother copying unused landing pads. */
537 if (old_lp->post_landing_pad == NULL)
538 continue;
539
540 new_lp = gen_eh_landing_pad (new_r);
541 slot = pointer_map_insert (data->eh_map, (void *)old_lp);
542 gcc_assert (*slot == NULL);
543 *slot = (void *)new_lp;
544
545 new_lp->post_landing_pad
546 = data->label_map (old_lp->post_landing_pad, data->label_map_data);
547 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
548 }
549
550 /* Make sure to preserve the original use of __cxa_end_cleanup. */
551 new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
552
553 for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
554 duplicate_eh_regions_1 (data, old_r, new_r);
555 }
556
557 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
558 the current function and root the tree below OUTER_REGION.
559 The special case of COPY_REGION of NULL means all regions.
560 Remap labels using MAP/MAP_DATA callback. Return a pointer map
561 that allows the caller to remap uses of both EH regions and
562 EH landing pads. */
563
564 struct pointer_map_t *
565 duplicate_eh_regions (struct function *ifun,
566 eh_region copy_region, int outer_lp,
567 duplicate_eh_regions_map map, void *map_data)
568 {
569 struct duplicate_eh_regions_data data;
570 eh_region outer_region;
571
572 #ifdef ENABLE_CHECKING
573 verify_eh_tree (ifun);
574 #endif
575
576 data.label_map = map;
577 data.label_map_data = map_data;
578 data.eh_map = pointer_map_create ();
579
580 outer_region = get_eh_region_from_lp_number (outer_lp);
581
582 /* Copy all the regions in the subtree. */
583 if (copy_region)
584 duplicate_eh_regions_1 (&data, copy_region, outer_region);
585 else
586 {
587 eh_region r;
588 for (r = ifun->eh->region_tree; r ; r = r->next_peer)
589 duplicate_eh_regions_1 (&data, r, outer_region);
590 }
591
592 #ifdef ENABLE_CHECKING
593 verify_eh_tree (cfun);
594 #endif
595
596 return data.eh_map;
597 }
598
599 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
600
601 eh_region
602 eh_region_outermost (struct function *ifun, eh_region region_a,
603 eh_region region_b)
604 {
605 sbitmap b_outer;
606
607 gcc_assert (ifun->eh->region_array);
608 gcc_assert (ifun->eh->region_tree);
609
610 b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
611 sbitmap_zero (b_outer);
612
613 do
614 {
615 SET_BIT (b_outer, region_b->index);
616 region_b = region_b->outer;
617 }
618 while (region_b);
619
620 do
621 {
622 if (TEST_BIT (b_outer, region_a->index))
623 break;
624 region_a = region_a->outer;
625 }
626 while (region_a);
627
628 sbitmap_free (b_outer);
629 return region_a;
630 }
631 \f
632 static int
633 t2r_eq (const void *pentry, const void *pdata)
634 {
635 const_tree const entry = (const_tree) pentry;
636 const_tree const data = (const_tree) pdata;
637
638 return TREE_PURPOSE (entry) == data;
639 }
640
641 static hashval_t
642 t2r_hash (const void *pentry)
643 {
644 const_tree const entry = (const_tree) pentry;
645 return TREE_HASH (TREE_PURPOSE (entry));
646 }
647
648 void
649 add_type_for_runtime (tree type)
650 {
651 tree *slot;
652
653 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
654 if (TREE_CODE (type) == NOP_EXPR)
655 return;
656
657 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
658 TREE_HASH (type), INSERT);
659 if (*slot == NULL)
660 {
661 tree runtime = lang_hooks.eh_runtime_type (type);
662 *slot = tree_cons (type, runtime, NULL_TREE);
663 }
664 }
665
666 tree
667 lookup_type_for_runtime (tree type)
668 {
669 tree *slot;
670
671 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
672 if (TREE_CODE (type) == NOP_EXPR)
673 return type;
674
675 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
676 TREE_HASH (type), NO_INSERT);
677
678 /* We should have always inserted the data earlier. */
679 return TREE_VALUE (*slot);
680 }
681
682 \f
683 /* Represent an entry in @TTypes for either catch actions
684 or exception filter actions. */
685 struct ttypes_filter {
686 tree t;
687 int filter;
688 };
689
690 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
691 (a tree) for a @TTypes type node we are thinking about adding. */
692
693 static int
694 ttypes_filter_eq (const void *pentry, const void *pdata)
695 {
696 const struct ttypes_filter *const entry
697 = (const struct ttypes_filter *) pentry;
698 const_tree const data = (const_tree) pdata;
699
700 return entry->t == data;
701 }
702
703 static hashval_t
704 ttypes_filter_hash (const void *pentry)
705 {
706 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
707 return TREE_HASH (entry->t);
708 }
709
710 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
711 exception specification list we are thinking about adding. */
712 /* ??? Currently we use the type lists in the order given. Someone
713 should put these in some canonical order. */
714
715 static int
716 ehspec_filter_eq (const void *pentry, const void *pdata)
717 {
718 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
719 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
720
721 return type_list_equal (entry->t, data->t);
722 }
723
724 /* Hash function for exception specification lists. */
725
726 static hashval_t
727 ehspec_filter_hash (const void *pentry)
728 {
729 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
730 hashval_t h = 0;
731 tree list;
732
733 for (list = entry->t; list ; list = TREE_CHAIN (list))
734 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
735 return h;
736 }
737
738 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
739 to speed up the search. Return the filter value to be used. */
740
741 static int
742 add_ttypes_entry (htab_t ttypes_hash, tree type)
743 {
744 struct ttypes_filter **slot, *n;
745
746 slot = (struct ttypes_filter **)
747 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
748
749 if ((n = *slot) == NULL)
750 {
751 /* Filter value is a 1 based table index. */
752
753 n = XNEW (struct ttypes_filter);
754 n->t = type;
755 n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
756 *slot = n;
757
758 VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
759 }
760
761 return n->filter;
762 }
763
764 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
765 to speed up the search. Return the filter value to be used. */
766
767 static int
768 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
769 {
770 struct ttypes_filter **slot, *n;
771 struct ttypes_filter dummy;
772
773 dummy.t = list;
774 slot = (struct ttypes_filter **)
775 htab_find_slot (ehspec_hash, &dummy, INSERT);
776
777 if ((n = *slot) == NULL)
778 {
779 int len;
780
781 if (targetm.arm_eabi_unwinder)
782 len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
783 else
784 len = VEC_length (uchar, cfun->eh->ehspec_data.other);
785
786 /* Filter value is a -1 based byte index into a uleb128 buffer. */
787
788 n = XNEW (struct ttypes_filter);
789 n->t = list;
790 n->filter = -(len + 1);
791 *slot = n;
792
793 /* Generate a 0 terminated list of filter values. */
794 for (; list ; list = TREE_CHAIN (list))
795 {
796 if (targetm.arm_eabi_unwinder)
797 VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
798 TREE_VALUE (list));
799 else
800 {
801 /* Look up each type in the list and encode its filter
802 value as a uleb128. */
803 push_uleb128 (&cfun->eh->ehspec_data.other,
804 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
805 }
806 }
807 if (targetm.arm_eabi_unwinder)
808 VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
809 else
810 VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
811 }
812
813 return n->filter;
814 }
815
816 /* Generate the action filter values to be used for CATCH and
817 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
818 we use lots of landing pads, and so every type or list can share
819 the same filter value, which saves table space. */
820
821 void
822 assign_filter_values (void)
823 {
824 int i;
825 htab_t ttypes, ehspec;
826 eh_region r;
827 eh_catch c;
828
829 cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
830 if (targetm.arm_eabi_unwinder)
831 cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
832 else
833 cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);
834
835 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
836 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
837
838 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
839 {
840 if (r == NULL)
841 continue;
842
843 switch (r->type)
844 {
845 case ERT_TRY:
846 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
847 {
848 /* Whatever type_list is (NULL or true list), we build a list
849 of filters for the region. */
850 c->filter_list = NULL_TREE;
851
852 if (c->type_list != NULL)
853 {
854 /* Get a filter value for each of the types caught and store
855 them in the region's dedicated list. */
856 tree tp_node = c->type_list;
857
858 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
859 {
860 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
861 tree flt_node = build_int_cst (integer_type_node, flt);
862
863 c->filter_list
864 = tree_cons (NULL_TREE, flt_node, c->filter_list);
865 }
866 }
867 else
868 {
869 /* Get a filter value for the NULL list also since it
870 will need an action record anyway. */
871 int flt = add_ttypes_entry (ttypes, NULL);
872 tree flt_node = build_int_cst (integer_type_node, flt);
873
874 c->filter_list
875 = tree_cons (NULL_TREE, flt_node, NULL);
876 }
877 }
878 break;
879
880 case ERT_ALLOWED_EXCEPTIONS:
881 r->u.allowed.filter
882 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
883 break;
884
885 default:
886 break;
887 }
888 }
889
890 htab_delete (ttypes);
891 htab_delete (ehspec);
892 }
893
894 /* Emit SEQ into basic block just before INSN (that is assumed to be
895 first instruction of some existing BB and return the newly
896 produced block. */
897 static basic_block
898 emit_to_new_bb_before (rtx seq, rtx insn)
899 {
900 rtx last;
901 basic_block bb, prev_bb;
902 edge e;
903 edge_iterator ei;
904
905 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
906 call), we don't want it to go into newly created landing pad or other EH
907 construct. */
908 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
909 if (e->flags & EDGE_FALLTHRU)
910 force_nonfallthru (e);
911 else
912 ei_next (&ei);
913 last = emit_insn_before (seq, insn);
914 if (BARRIER_P (last))
915 last = PREV_INSN (last);
916 prev_bb = BLOCK_FOR_INSN (insn)->prev_bb;
917 bb = create_basic_block (seq, last, prev_bb);
918 update_bb_for_insn (bb);
919 bb->flags |= BB_SUPERBLOCK;
920 return bb;
921 }
922 \f
923 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
924 at the rtl level. Emit the code required by the target at a landing
925 pad for the given region. */
926
927 void
928 expand_dw2_landing_pad_for_region (eh_region region)
929 {
930 #ifdef HAVE_exception_receiver
931 if (HAVE_exception_receiver)
932 emit_insn (gen_exception_receiver ());
933 else
934 #endif
935 #ifdef HAVE_nonlocal_goto_receiver
936 if (HAVE_nonlocal_goto_receiver)
937 emit_insn (gen_nonlocal_goto_receiver ());
938 else
939 #endif
940 { /* Nothing */ }
941
942 if (region->exc_ptr_reg)
943 emit_move_insn (region->exc_ptr_reg,
944 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
945 if (region->filter_reg)
946 emit_move_insn (region->filter_reg,
947 gen_rtx_REG (targetm.eh_return_filter_mode (),
948 EH_RETURN_DATA_REGNO (1)));
949 }
950
951 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
952
953 static void
954 dw2_build_landing_pads (void)
955 {
956 int i;
957 eh_landing_pad lp;
958 int e_flags = EDGE_FALLTHRU;
959
960 /* If we're going to partition blocks, we need to be able to add
961 new landing pads later, which means that we need to hold on to
962 the post-landing-pad block. Prevent it from being merged away.
963 We'll remove this bit after partitioning. */
964 if (flag_reorder_blocks_and_partition)
965 e_flags |= EDGE_PRESERVE;
966
967 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
968 {
969 basic_block bb;
970 rtx seq;
971 edge e;
972
973 if (lp == NULL || lp->post_landing_pad == NULL)
974 continue;
975
976 start_sequence ();
977
978 lp->landing_pad = gen_label_rtx ();
979 emit_label (lp->landing_pad);
980 LABEL_PRESERVE_P (lp->landing_pad) = 1;
981
982 expand_dw2_landing_pad_for_region (lp->region);
983
984 seq = get_insns ();
985 end_sequence ();
986
987 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
988 e = make_edge (bb, bb->next_bb, e_flags);
989 e->count = bb->count;
990 e->probability = REG_BR_PROB_BASE;
991 if (current_loops)
992 {
993 struct loop *loop = bb->next_bb->loop_father;
994 /* If we created a pre-header block, add the new block to the
995 outer loop, otherwise to the loop itself. */
996 if (bb->next_bb == loop->header)
997 add_bb_to_loop (bb, loop_outer (loop));
998 else
999 add_bb_to_loop (bb, loop);
1000 }
1001 }
1002 }
1003
1004 \f
1005 static VEC (int, heap) *sjlj_lp_call_site_index;
1006
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  Returns the number of dispatch indices
   handed out, i.e. the number of landing pads that need dispatching.  */

static int
sjlj_assign_call_site_values (void)
{
  htab_t ar_hash;
  int i, disp_index;
  eh_landing_pad lp;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
  /* Hash table used to share identical action records.  */
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        VEC_replace (int, sjlj_lp_call_site_index, i, call_site);

        disp_index++;
      }

  htab_delete (ar_hash);

  return disp_index;
}
1051
/* Emit code to record the current call-site index before every
   insn that can throw.  Stores are elided when the previous throwing
   insn in the same extended basic block already stored the same
   index.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;  /* -2: no value stored yet; indices are >= -1.  */
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          /* The only region left without a landing pad is
             must-not-throw; it uses the reserved index 0.  */
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL_RTX);

      /* Store the call-site index into the function context slot.  */
      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1116
/* Construct the SjLj_Function_Context.  Fill in the personality and
   LSDA slots of the on-stack context, set up the setjmp-style receiver
   jumping to DISPATCH_LABEL (if non-null), register the context with
   the runtime, and emit all of that at function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Point the LSDA slot at this function's LSDA label, or store 0 if
     no landing pads need one.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      /* Target cannot use __builtin_setjmp; call the real setjmp and
         branch to the dispatcher when it returns nonzero.  */
      rtx x, last;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                   TYPE_MODE (integer_type_node), 1,
                                   plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label);
      last = get_last_insn ();
      if (JUMP_P (last) && any_condjump_p (last))
        {
          /* Mark the exceptional path as unlikely (1%).  */
          gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
          add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
        }
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs),
                                   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find NOTE_INSN_FUNCTION_BEG and note whether any basic block note
     precedes it; that decides whether we can splice onto the entry
     edge or must emit after the note.  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
1201
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  AFTER is the
   insn after which sjlj_emit_function_exit will place the call.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}
1210
/* Emit the call that unregisters this function's SjLj unwind context,
   at the point recorded earlier by sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  /* If the recorded point is a label, step past it so the call is
     emitted inside the block the label starts.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
1234
/* Emit the SjLj dispatcher at DISPATCH_LABEL for NUM_DISPATCH landing
   pads.  The dispatcher reloads the exception pointer and filter from
   the function context, then transfers to the appropriate
   post-landing-pad code, via a switch on the stored call-site index
   when there is more than one destination.  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  gimple switch_stmt;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  switch_stmt = NULL;
  if (num_dispatch > 1)
    {
      tree disp;

      /* The switch selector is the call-site index stored in the
         function context by sjlj_mark_call_sites.  */
      mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      disp = make_tree (integer_type_node, mem);

      switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
    }

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx seq2, label;

        start_sequence ();

        /* All landing pads share the one dispatcher entry point.  */
        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            /* Add a switch case for this dispatch index, targeting a
               fresh label for this landing pad's copy-out code.  */
            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            gimple_switch_set_label (switch_stmt, disp_index, case_elt);

            label = label_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        /* Copy the dispatcher's exc_ptr/filter values into the
           pseudos the region's handler code reads.  */
        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        /* Place the copy-out code just before the post-landing-pad
           block and fall through into it.  */
        before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;
        if (current_loops)
          {
            struct loop *loop = bb->next_bb->loop_father;
            /* If we created a pre-header block, add the new block to the
               outer loop, otherwise to the loop itself.  */
            if (bb->next_bb == loop->header)
              add_bb_to_loop (bb, loop_outer (loop));
            else
              add_bb_to_loop (bb, loop);
            /* ??? For multiple dispatches we will end up with edges
               from the loop tree root into this loop, making it a
               multiple-entry loop.  Discard all affected loops.  */
            if (num_dispatch > 1)
              {
                for (loop = bb->loop_father;
                     loop_outer (loop); loop = loop_outer (loop))
                  {
                    loop->header = NULL;
                    loop->latch = NULL;
                  }
              }
          }

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      /* Emit the switch itself; trap if the selector matches no case,
         which should be unreachable.  */
      expand_case (switch_stmt);
      expand_builtin_trap ();
    }

  seq = get_insns ();
  end_sequence ();

  /* Place the dispatcher block before the first reachable target.  */
  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
         outermost (non-)loop.  */
      if (current_loops)
        add_bb_to_loop (bb, current_loops->tree_root);
    }
}
1408
1409 static void
1410 sjlj_build_landing_pads (void)
1411 {
1412 int num_dispatch;
1413
1414 num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
1415 if (num_dispatch == 0)
1416 return;
1417 VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);
1418
1419 num_dispatch = sjlj_assign_call_site_values ();
1420 if (num_dispatch > 0)
1421 {
1422 rtx dispatch_label = gen_label_rtx ();
1423 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1424 TYPE_MODE (sjlj_fc_type_node),
1425 TYPE_ALIGN (sjlj_fc_type_node));
1426 crtl->eh.sjlj_fc
1427 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1428 int_size_in_bytes (sjlj_fc_type_node),
1429 align);
1430
1431 sjlj_mark_call_sites ();
1432 sjlj_emit_function_enter (dispatch_label);
1433 sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
1434 sjlj_emit_function_exit ();
1435 }
1436
1437 /* If we do not have any landing pads, we may still need to register a
1438 personality routine and (empty) LSDA to handle must-not-throw regions. */
1439 else if (function_needs_eh_personality (cfun) != eh_personality_none)
1440 {
1441 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1442 TYPE_MODE (sjlj_fc_type_node),
1443 TYPE_ALIGN (sjlj_fc_type_node));
1444 crtl->eh.sjlj_fc
1445 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1446 int_size_in_bytes (sjlj_fc_type_node),
1447 align);
1448
1449 sjlj_mark_call_sites ();
1450 sjlj_emit_function_enter (NULL_RTX);
1451 sjlj_emit_function_exit ();
1452 }
1453
1454 VEC_free (int, heap, sjlj_lp_call_site_index);
1455 }
1456
/* After initial rtl generation, call back to finish generating
   exception support code.  Builds the landing pads (dwarf2 or SjLj
   depending on the target's unwind info), commits pending edge
   insertions, and redirects each EH edge from the post-landing-pad
   block to the newly built landing pad.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB (bb)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          /* EH edges from calls additionally carry EDGE_ABNORMAL_CALL.  */
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}
1507 \f
1508 /* This section handles removing dead code for flow. */
1509
1510 void
1511 remove_eh_landing_pad (eh_landing_pad lp)
1512 {
1513 eh_landing_pad *pp;
1514
1515 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1516 continue;
1517 *pp = lp->next_lp;
1518
1519 if (lp->post_landing_pad)
1520 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1521 VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
1522 }
1523
/* Splice REGION from the region tree.  REGION's children are re-parented
   to REGION's outer region (or become toplevel), its landing pads are
   dissolved, and its slot in the region array is cleared.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;
  eh_landing_pad lp;

  /* Detach all of REGION's landing pads from the function.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
    }

  /* Find the link pointing at REGION in its parent's (or the tree
     root's) peer list.  */
  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  if (region->inner)
    {
      /* Splice REGION's children into its place, re-parenting each to
         OUTER, and leave PP pointing at the last child's next link.  */
      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  /* Close the peer list over the removed region.  */
  *pp = region->next_peer;

  VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
}
1561
1562 /* Invokes CALLBACK for every exception handler landing pad label.
1563 Only used by reload hackery; should not be used by new code. */
1564
1565 void
1566 for_each_eh_label (void (*callback) (rtx))
1567 {
1568 eh_landing_pad lp;
1569 int i;
1570
1571 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1572 {
1573 if (lp)
1574 {
1575 rtx lab = lp->landing_pad;
1576 if (lab && LABEL_P (lab))
1577 (*callback) (lab);
1578 }
1579 }
1580 }
1581 \f
1582 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1583 call insn.
1584
1585 At the gimple level, we use LP_NR
1586 > 0 : The statement transfers to landing pad LP_NR
1587 = 0 : The statement is outside any EH region
1588 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1589
1590 At the rtl level, we use LP_NR
1591 > 0 : The insn transfers to landing pad LP_NR
1592 = 0 : The insn cannot throw
1593 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1594 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1595 missing note: The insn is outside any EH region.
1596
1597 ??? This difference probably ought to be avoided. We could stand
1598 to record nothrow for arbitrary gimple statements, and so avoid
1599 some moderately complex lookups in stmt_could_throw_p. Perhaps
1600 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1601 no-nonlocal-goto property should be recorded elsewhere as a bit
1602 on the call_insn directly. Perhaps we should make more use of
1603 attaching the trees to call_insns (reachable via symbol_ref in
1604 direct call cases) and just pull the data out of the trees. */
1605
1606 void
1607 make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
1608 {
1609 rtx value;
1610 if (ecf_flags & ECF_NOTHROW)
1611 value = const0_rtx;
1612 else if (lp_nr != 0)
1613 value = GEN_INT (lp_nr);
1614 else
1615 return;
1616 add_reg_note (insn, REG_EH_REGION, value);
1617 }
1618
1619 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1620 nor perform a non-local goto. Replace the region note if it
1621 already exists. */
1622
1623 void
1624 make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1625 {
1626 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1627 rtx intmin = GEN_INT (INT_MIN);
1628
1629 if (note != 0)
1630 XEXP (note, 0) = intmin;
1631 else
1632 add_reg_note (insn, REG_EH_REGION, intmin);
1633 }
1634
1635 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1636 to the contrary. */
1637
1638 bool
1639 insn_could_throw_p (const_rtx insn)
1640 {
1641 if (!flag_exceptions)
1642 return false;
1643 if (CALL_P (insn))
1644 return true;
1645 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1646 return may_trap_p (PATTERN (insn));
1647 return false;
1648 }
1649
1650 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1651 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1652 to look for a note, or the note itself. */
1653
1654 void
1655 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
1656 {
1657 rtx insn, note = note_or_insn;
1658
1659 if (INSN_P (note_or_insn))
1660 {
1661 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1662 if (note == NULL)
1663 return;
1664 }
1665 note = XEXP (note, 0);
1666
1667 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1668 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1669 && insn_could_throw_p (insn))
1670 add_reg_note (insn, REG_EH_REGION, note);
1671 }
1672
1673 /* Likewise, but iterate backward. */
1674
1675 void
1676 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
1677 {
1678 rtx insn, note = note_or_insn;
1679
1680 if (INSN_P (note_or_insn))
1681 {
1682 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1683 if (note == NULL)
1684 return;
1685 }
1686 note = XEXP (note, 0);
1687
1688 for (insn = last; insn != first; insn = PREV_INSN (insn))
1689 if (insn_could_throw_p (insn))
1690 add_reg_note (insn, REG_EH_REGION, note);
1691 }
1692
1693
1694 /* Extract all EH information from INSN. Return true if the insn
1695 was marked NOTHROW. */
1696
1697 static bool
1698 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1699 eh_landing_pad *plp)
1700 {
1701 eh_landing_pad lp = NULL;
1702 eh_region r = NULL;
1703 bool ret = false;
1704 rtx note;
1705 int lp_nr;
1706
1707 if (! INSN_P (insn))
1708 goto egress;
1709
1710 if (NONJUMP_INSN_P (insn)
1711 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1712 insn = XVECEXP (PATTERN (insn), 0, 0);
1713
1714 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1715 if (!note)
1716 {
1717 ret = !insn_could_throw_p (insn);
1718 goto egress;
1719 }
1720
1721 lp_nr = INTVAL (XEXP (note, 0));
1722 if (lp_nr == 0 || lp_nr == INT_MIN)
1723 {
1724 ret = true;
1725 goto egress;
1726 }
1727
1728 if (lp_nr < 0)
1729 r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
1730 else
1731 {
1732 lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
1733 r = lp->region;
1734 }
1735
1736 egress:
1737 *plp = lp;
1738 *pr = r;
1739 return ret;
1740 }
1741
1742 /* Return the landing pad to which INSN may go, or NULL if it does not
1743 have a reachable landing pad within this function. */
1744
1745 eh_landing_pad
1746 get_eh_landing_pad_from_rtx (const_rtx insn)
1747 {
1748 eh_landing_pad lp;
1749 eh_region r;
1750
1751 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1752 return lp;
1753 }
1754
1755 /* Return the region to which INSN may go, or NULL if it does not
1756 have a reachable region within this function. */
1757
1758 eh_region
1759 get_eh_region_from_rtx (const_rtx insn)
1760 {
1761 eh_landing_pad lp;
1762 eh_region r;
1763
1764 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1765 return r;
1766 }
1767
1768 /* Return true if INSN throws and is caught by something in this function. */
1769
1770 bool
1771 can_throw_internal (const_rtx insn)
1772 {
1773 return get_eh_landing_pad_from_rtx (insn) != NULL;
1774 }
1775
1776 /* Return true if INSN throws and escapes from the current function. */
1777
1778 bool
1779 can_throw_external (const_rtx insn)
1780 {
1781 eh_landing_pad lp;
1782 eh_region r;
1783 bool nothrow;
1784
1785 if (! INSN_P (insn))
1786 return false;
1787
1788 if (NONJUMP_INSN_P (insn)
1789 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1790 {
1791 rtx seq = PATTERN (insn);
1792 int i, n = XVECLEN (seq, 0);
1793
1794 for (i = 0; i < n; i++)
1795 if (can_throw_external (XVECEXP (seq, 0, i)))
1796 return true;
1797
1798 return false;
1799 }
1800
1801 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1802
1803 /* If we can't throw, we obviously can't throw external. */
1804 if (nothrow)
1805 return false;
1806
1807 /* If we have an internal landing pad, then we're not external. */
1808 if (lp != NULL)
1809 return false;
1810
1811 /* If we're not within an EH region, then we are external. */
1812 if (r == NULL)
1813 return true;
1814
1815 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1816 which don't always have landing pads. */
1817 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1818 return false;
1819 }
1820
1821 /* Return true if INSN cannot throw at all. */
1822
1823 bool
1824 insn_nothrow_p (const_rtx insn)
1825 {
1826 eh_landing_pad lp;
1827 eh_region r;
1828
1829 if (! INSN_P (insn))
1830 return true;
1831
1832 if (NONJUMP_INSN_P (insn)
1833 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1834 {
1835 rtx seq = PATTERN (insn);
1836 int i, n = XVECLEN (seq, 0);
1837
1838 for (i = 0; i < n; i++)
1839 if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
1840 return false;
1841
1842 return true;
1843 }
1844
1845 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1846 }
1847
1848 /* Return true if INSN can perform a non-local goto. */
1849 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1850
1851 bool
1852 can_nonlocal_goto (const_rtx insn)
1853 {
1854 if (nonlocal_goto_handler_labels && CALL_P (insn))
1855 {
1856 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1857 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1858 return true;
1859 }
1860 return false;
1861 }
1862 \f
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  Scans every
   insn (including the epilogue delay list) for externally-visible
   throws, and when none are found propagates the nothrow property to
   the cgraph node and its callers.  Always returns 0 (pass return
   convention).  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

        /* A throwing non-sibcall settles both questions: give up.  */
        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            crtl->all_throwers_are_sibcalls = 0;
            return 0;
          }
      }

  /* Also check insns queued for epilogue delay slots.  */
  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

        if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
          {
            crtl->all_throwers_are_sibcalls = 0;
            return 0;
          }
      }
  /* Record the nothrow property in the call graph, but only when this
     body is the one callers will actually use.  */
  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_get_node
                                             (current_function_decl))
          >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_get_node (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
        e->can_throw_external = false;
      cgraph_set_nothrow_flag (node, true);

      if (dump_file)
        fprintf (dump_file, "Marking function nothrow: %s\n\n",
                 current_function_name ());
    }
  return 0;
}
1930
/* Pass descriptor for the RTL "nothrow" pass, which runs
   set_nothrow_function_flags.  */

struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  "nothrow",                            /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1949
1950 \f
1951 /* Various hooks for unwind library. */
1952
1953 /* Expand the EH support builtin functions:
1954 __builtin_eh_pointer and __builtin_eh_filter. */
1955
1956 static eh_region
1957 expand_builtin_eh_common (tree region_nr_t)
1958 {
1959 HOST_WIDE_INT region_nr;
1960 eh_region region;
1961
1962 gcc_assert (host_integerp (region_nr_t, 0));
1963 region_nr = tree_low_cst (region_nr_t, 0);
1964
1965 region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
1966
1967 /* ??? We shouldn't have been able to delete a eh region without
1968 deleting all the code that depended on it. */
1969 gcc_assert (region != NULL);
1970
1971 return region;
1972 }
1973
1974 /* Expand to the exc_ptr value from the given eh region. */
1975
1976 rtx
1977 expand_builtin_eh_pointer (tree exp)
1978 {
1979 eh_region region
1980 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1981 if (region->exc_ptr_reg == NULL)
1982 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1983 return region->exc_ptr_reg;
1984 }
1985
1986 /* Expand to the filter value from the given eh region. */
1987
1988 rtx
1989 expand_builtin_eh_filter (tree exp)
1990 {
1991 eh_region region
1992 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1993 if (region->filter_reg == NULL)
1994 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
1995 return region->filter_reg;
1996 }
1997
1998 /* Copy the exc_ptr and filter values from one landing pad's registers
1999 to another. This is used to inline the resx statement. */
2000
2001 rtx
2002 expand_builtin_eh_copy_values (tree exp)
2003 {
2004 eh_region dst
2005 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2006 eh_region src
2007 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2008 enum machine_mode fmode = targetm.eh_return_filter_mode ();
2009
2010 if (dst->exc_ptr_reg == NULL)
2011 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2012 if (src->exc_ptr_reg == NULL)
2013 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2014
2015 if (dst->filter_reg == NULL)
2016 dst->filter_reg = gen_reg_rtx (fmode);
2017 if (src->filter_reg == NULL)
2018 src->filter_reg = gen_reg_rtx (fmode);
2019
2020 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2021 emit_move_insn (dst->filter_reg, src->filter_reg);
2022
2023 return const0_rtx;
2024 }
2025
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

#ifdef SETUP_FRAME_ADDRESSES
  /* Target hook for any extra frame-address setup (e.g. SPARC window
     flush).  */
  SETUP_FRAME_ADDRESSES ();
#endif
}
2040
2041 /* Map a non-negative number to an eh return data register number; expands
2042 to -1 if no return data register is associated with the input number.
2043 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2044
2045 rtx
2046 expand_builtin_eh_return_data_regno (tree exp)
2047 {
2048 tree which = CALL_EXPR_ARG (exp, 0);
2049 unsigned HOST_WIDE_INT iwhich;
2050
2051 if (TREE_CODE (which) != INTEGER_CST)
2052 {
2053 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2054 return constm1_rtx;
2055 }
2056
2057 iwhich = tree_low_cst (which, 1);
2058 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2059 if (iwhich == INVALID_REGNUM)
2060 return constm1_rtx;
2061
2062 #ifdef DWARF_FRAME_REGNUM
2063 iwhich = DWARF_FRAME_REGNUM (iwhich);
2064 #else
2065 iwhich = DBX_REGISTER_NUMBER (iwhich);
2066 #endif
2067
2068 return GEN_INT (iwhich);
2069 }
2070
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  Applies the
   target's return-address mask and offset, if any.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Bring the value into Pmode if it was expanded in another mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2101
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  Inverse of
   expand_builtin_extract_return_addr's offset adjustment.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  /* Undo the offset applied when extracting the return address.  */
  addr = force_reg (Pmode, addr);
  addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2120
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  Evaluates the stack adjustment (if the target
   supports one) and handler address into persistent pseudos and jumps
   to the shared eh-return label emitted by expand_eh_return.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
                          tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Capture the stack adjustment in a reusable pseudo.  */
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
                     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Capture the handler address likewise.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
                     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* All __builtin_eh_return expansions share one exit label.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
2152
2153 /* Expand __builtin_eh_return. This exit path from the function loads up
2154 the eh return data registers, adjusts the stack, and branches to a
2155 given PC other than the normal return address. */
2156
void
expand_eh_return (void)
{
  rtx around_label;

  /* If expand_builtin_eh_return was never called, there is nothing
     to do.  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* The normal return path makes no stack adjustment.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal control flow jumps around the EH-return sequence.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  /* Hand the handler address to the target: either via a dedicated
     eh_return insn, or by storing into EH_RETURN_HANDLER_RTX.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
2196
2197 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2198 POINTERS_EXTEND_UNSIGNED and return it. */
2199
2200 rtx
2201 expand_builtin_extend_pointer (tree addr_tree)
2202 {
2203 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2204 int extend;
2205
2206 #ifdef POINTERS_EXTEND_UNSIGNED
2207 extend = POINTERS_EXTEND_UNSIGNED;
2208 #else
2209 /* The previous EH code did an unsigned extend by default, so we do this also
2210 for consistency. */
2211 extend = 1;
2212 #endif
2213
2214 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2215 }
2216 \f
2217 /* In the following functions, we represent entries in the action table
2218 as 1-based indices. Special cases are:
2219
2220 0: null action record, non-null landing pad; implies cleanups
2221 -1: null action record, null landing pad; implies no action
2222 -2: no call-site entry; implies must_not_throw
2223 -3: we have yet to process outer regions
2224
2225 Further, no special cases apply to the "next" field of the record.
2226 For next, 0 means end of list. */
2227
struct action_record
{
  int offset;	/* 1-based start position of this record within
		   crtl->eh.action_record_data.  */
  int filter;	/* Type filter value; 0 denotes a cleanup.  */
  int next;	/* 1-based index of the next record, or 0 for end of
		   list (see comment above).  */
};
2234
2235 static int
2236 action_record_eq (const void *pentry, const void *pdata)
2237 {
2238 const struct action_record *entry = (const struct action_record *) pentry;
2239 const struct action_record *data = (const struct action_record *) pdata;
2240 return entry->filter == data->filter && entry->next == data->next;
2241 }
2242
2243 static hashval_t
2244 action_record_hash (const void *pentry)
2245 {
2246 const struct action_record *entry = (const struct action_record *) pentry;
2247 return entry->next * 1009 + entry->filter;
2248 }
2249
2250 static int
2251 add_action_record (htab_t ar_hash, int filter, int next)
2252 {
2253 struct action_record **slot, *new_ar, tmp;
2254
2255 tmp.filter = filter;
2256 tmp.next = next;
2257 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
2258
2259 if ((new_ar = *slot) == NULL)
2260 {
2261 new_ar = XNEW (struct action_record);
2262 new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
2263 new_ar->filter = filter;
2264 new_ar->next = next;
2265 *slot = new_ar;
2266
2267 /* The filter value goes in untouched. The link to the next
2268 record is a "self-relative" byte offset, or zero to indicate
2269 that there is no next record. So convert the absolute 1 based
2270 indices we've been carrying around into a displacement. */
2271
2272 push_sleb128 (&crtl->eh.action_record_data, filter);
2273 if (next)
2274 next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
2275 push_sleb128 (&crtl->eh.action_record_data, next);
2276 }
2277
2278 return new_ar->offset;
2279 }
2280
/* Build the chain of action records describing REGION and its outer
   regions, entering any new records into AR_HASH.  Return the 1-based
   offset of the chain head, or one of the special non-positive values
   (0, -1, -2, -3) documented in the comment above.  */

static int
collect_one_action_chain (htab_t ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
2388
2389 static int
2390 add_call_site (rtx landing_pad, int action, int section)
2391 {
2392 call_site_record record;
2393
2394 record = ggc_alloc_call_site_record_d ();
2395 record->landing_pad = landing_pad;
2396 record->action = action;
2397
2398 VEC_safe_push (call_site_record, gc,
2399 crtl->eh.call_site_record[section], record);
2400
2401 return call_site_base + VEC_length (call_site_record,
2402 crtl->eh.call_site_record[section]) - 1;
2403 }
2404
2405 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2406 The new note numbers will not refer to region numbers, but
2407 instead to call site entries. */
2408
static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;			/* Action of the open region; magic
					   values as documented above.  */
  rtx last_action_insn = NULL_RTX;	/* Last insn of the open region.  */
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;	/* First insn of a queued no-action
					   region whose notes are delayed.  */
  int call_site = 0;
  int cur_sec = 0;			/* Current text section index.  */
  rtx section_switch_note = NULL_RTX;
  rtx first_no_action_insn_before_switch = NULL_RTX;
  rtx last_no_action_insn_before_switch = NULL_RTX;
  int saved_call_site_base = call_site_base;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, take the EH info from its first
	   insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL_RTX;
		last_no_action_insn_before_switch = NULL_RTX;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	/* Requeue a delayed no-action region so that it is emitted in
	   the section it started in (see above).  */
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL_RTX;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	call_site_base += VEC_length (call_site_record,
				      crtl->eh.call_site_record[cur_sec]);
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
	crtl->eh.call_site_record[cur_sec]
	  = VEC_alloc (call_site_record, gc, 10);
      }

  /* Close any region still open at the end of the insn stream.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  htab_delete (ar_hash);
  return 0;
}
2568
2569 static bool
2570 gate_convert_to_eh_region_ranges (void)
2571 {
2572 /* Nothing to do for SJLJ exceptions or if no regions created. */
2573 if (cfun->eh->region_tree == NULL)
2574 return false;
2575 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2576 return false;
2577 return true;
2578 }
2579
/* Pass descriptor for the RTL "eh_ranges" pass, which turns
   REG_EH_REGION notes into call-site based EH_REGION note ranges.  */

struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  gate_convert_to_eh_region_ranges,	/* gate */
  convert_to_eh_region_ranges,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0             			/* todo_flags_finish */
 }
};
2598 \f
2599 static void
2600 push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
2601 {
2602 do
2603 {
2604 unsigned char byte = value & 0x7f;
2605 value >>= 7;
2606 if (value)
2607 byte |= 0x80;
2608 VEC_safe_push (uchar, gc, *data_area, byte);
2609 }
2610 while (value);
2611 }
2612
/* Append VALUE to *DATA_AREA in signed LEB128 encoding.  */

static void
push_sleb128 (VEC (uchar, gc) **data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      /* NOTE(review): relies on arithmetic (sign-propagating) right
	 shift of a negative int, which is implementation-defined in
	 ISO C but assumed to hold on GCC's host compilers.  */
      value >>= 7;
      /* Stop once VALUE is exhausted and the sign bit of the last
	 byte agrees with the sign of the original value.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VEC_safe_push (uchar, gc, *data_area, byte);
    }
  while (more);
}
2631
2632 \f
2633 #ifndef HAVE_AS_LEB128
2634 static int
2635 dw2_size_of_call_site_table (int section)
2636 {
2637 int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
2638 int size = n * (4 + 4 + 4);
2639 int i;
2640
2641 for (i = 0; i < n; ++i)
2642 {
2643 struct call_site_record_d *cs =
2644 VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
2645 size += size_of_uleb128 (cs->action);
2646 }
2647
2648 return size;
2649 }
2650
2651 static int
2652 sjlj_size_of_call_site_table (void)
2653 {
2654 int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2655 int size = 0;
2656 int i;
2657
2658 for (i = 0; i < n; ++i)
2659 {
2660 struct call_site_record_d *cs =
2661 VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2662 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2663 size += size_of_uleb128 (cs->action);
2664 }
2665
2666 return size;
2667 }
2668 #endif
2669
/* Emit SECTION's call-site table, with region offsets encoded per
   CS_FORMAT relative to the section's begin label.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
  int i;
  const char *begin;

  /* Pick the label the region offsets are measured from.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    /* A zero landing-pad field marks the absence of a pad.  */
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2731
/* Emit the SJLJ flavor of the call-site table: one uleb128
   landing-pad value and one uleb128 action index per record.  */

static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record[0], i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2750
2751 /* Switch to the section that should be used for exception tables. */
2752
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      /* The section can be read-only unless PIC with an
		 absolute/aligned @TType encoding would require
		 runtime relocations in it.  */
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  /* With -ffunction-sections the section name depends on
	     FNNAME, so the result cannot be cached.  */
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
2800
2801
2802 /* Output a reference from an exception table to the type_info object TYPE.
2803 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2804 the value. */
2805
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    /* A null entry is emitted as a literal zero.  */
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  /* Absolute or aligned pointers go out as plain integers; anything
     else uses the DWARF encoded-address machinery.  */
  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
2850
/* Emit the LSDA for one text section of the current function.
   SECTION selects which call-site table is used and which set of
   internal label names is generated (0 = first, nonzero = second
   text section under hot/cold partitioning).  */

static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* @TType data exists if we have any type filters or any exception
     specification data.  */
  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
		      : VEC_length (uchar, cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				   section ? "LLSDATTC" : "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table (section);
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				   section ? "LLSDATTDC" : "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VEC_length (uchar, crtl->eh.action_record_data)
		    + (VEC_length (tree, cfun->eh->ttype_data)
		       * tt_format_size));

      /* The displacement itself is uleb128-encoded, so its encoded
	 size depends on its value, which in turn depends on the
	 alignment padding; iterate until it stabilizes.  */
      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
			       section ? "LLSDACSBC" : "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
			       section ? "LLSDACSEC" : "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* @TType entries are emitted in reverse order.  */
  i = VEC_length (tree, cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
	   VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
	   VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
3026
3027 void
3028 output_function_exception_table (const char *fnname)
3029 {
3030 rtx personality = get_personality_function (current_function_decl);
3031
3032 /* Not all functions need anything. */
3033 if (! crtl->uses_eh_lsda)
3034 return;
3035
3036 if (personality)
3037 {
3038 assemble_external_libcall (personality);
3039
3040 if (targetm.asm_out.emit_except_personality)
3041 targetm.asm_out.emit_except_personality (personality);
3042 }
3043
3044 switch_to_exception_section (fnname);
3045
3046 /* If the target wants a label to begin the table, emit it here. */
3047 targetm.asm_out.emit_except_table_label (asm_out_file);
3048
3049 output_one_function_exception_table (0);
3050 if (crtl->eh.call_site_record[1] != NULL)
3051 output_one_function_exception_table (1);
3052
3053 switch_to_section (current_function_section ());
3054 }
3055
/* Set FUN's throw_stmt_table to TABLE.  */

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}
3061
/* Return FUN's throw_stmt_table.  */

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3067 \f
3068 /* Determine if the function needs an EH personality function. */
3069
3070 enum eh_personality_kind
3071 function_needs_eh_personality (struct function *fn)
3072 {
3073 enum eh_personality_kind kind = eh_personality_none;
3074 eh_region i;
3075
3076 FOR_ALL_EH_REGION_FN (i, fn)
3077 {
3078 switch (i->type)
3079 {
3080 case ERT_CLEANUP:
3081 /* Can do with any personality including the generic C one. */
3082 kind = eh_personality_any;
3083 break;
3084
3085 case ERT_TRY:
3086 case ERT_ALLOWED_EXCEPTIONS:
3087 /* Always needs a EH personality function. The generic C
3088 personality doesn't handle these even for empty type lists. */
3089 return eh_personality_lang;
3090
3091 case ERT_MUST_NOT_THROW:
3092 /* Always needs a EH personality function. The language may specify
3093 what abort routine that must be used, e.g. std::terminate. */
3094 return eh_personality_lang;
3095 }
3096 }
3097
3098 return kind;
3099 }
3100 \f
3101 /* Dump EH information to OUT. */
3102
void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  /* Printable names, indexed by (int) enum eh_region_type.  */
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Pre-order walk of the region tree; DEPTH tracks indentation.  */
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* In GIMPLE, print the post-landing-pad label tree.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad, 0);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* In RTL, print the insn UIDs of the pad labels, marking
		 labels that have been turned into deleted notes.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label, 0);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list, 0);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3217
3218 /* Dump the EH tree for FN on stderr. */
3219
3220 DEBUG_FUNCTION void
3221 debug_eh_tree (struct function *fn)
3222 {
3223 dump_eh_tree (stderr, fn);
3224 }
3225
3226 /* Verify invariants on EH datastructures. */
3227
DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  /* Check the consistency of FUN's EH data: every region and landing
     pad reachable from the region tree must be recorded at its own
     index in region_array/lp_array, parent/child links must agree,
     and the arrays must contain nothing the tree does not.  On any
     violation, dump the tree and abort via internal_error.  */
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* Pass 1: count regions stored in region_array, verifying that each
     occupies the slot matching its own index.  Slot 0 is not used, so
     iteration starts at 1.  */
  count_r = 0;
  for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Likewise count and check the landing pads in lp_array.  */
  count_lp = 0;
  for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Pass 2: iterative depth-first walk of the region tree.  OUTER
     tracks the expected parent of R; DEPTH tracks nesting so we can
     confirm the walk returns to depth 0 at the end.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      /* The array slot for R's index must point back at R.  */
      if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      /* R's outer link must match the parent we descended from.  */
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      /* Each landing pad of R must be in lp_array at its own index
	 and must point back to R as its region.  */
      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Advance the walk: children first, then the next peer, else
	 backtrack to the nearest ancestor that has an unvisited peer.
	 Note DEPTH is decremented only after the NULL check, so the
	 final step off the root leaves DEPTH at the root's level.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  /* The walk must end back at depth 0, and the counts from the arrays
     must match what the tree walk visited — otherwise the arrays hold
     stale or missing entries.  */
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
3344 \f
3345 #include "gt-except.h"