1 /* Implements exception handling.
2 Copyright (C) 1989, 92-96, 1997 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26 be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46 There are two major codegen options for exception handling: a PC
47 range table approach, and a setjmp/longjmp based scheme. The flag
48 -fsjlj-exceptions can be used to select the setjmp/longjmp
49 approach, which is the default, and -fno-sjlj-exceptions can be
50 used to get the PC range table approach. While this is a compile
51 time flag, an entire application must be compiled with the same
52 codegen option. We will first discuss the PC range table
53 approach, after that, we will discuss the setjmp/longjmp based
54 approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61 Regions of code within a function can be marked such that if one
62 of them contains the context of a throw, control will be passed to
63 its designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74 In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
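
   Conceptually, each entry can be pictured as the struct below (a
   sketch only; the names are illustrative, and the table itself is
   emitted by output_exception_table below as three pointer-sized
   words per region, terminated by three -1 words):

	struct exception_table_entry
	{
	  void *region_start;	   first address covered by the region
	  void *region_end;	   last address covered by the region
	  void *handler;	   address of the handler for the region
	};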
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
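
   In effect, each such object file runs something like the following
   at startup (a sketch; the real prototype is in libgcc2.c, and the
   call itself is emitted by register_exception_table below):

	extern void __register_exceptions (void *table);

	__register_exceptions (&__EXCEPTION_TABLE__);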
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108 by code in libgcc2.c, otherwise __throw is generated on a
109 per-object-file basis for each source file compiled with
110 -fexceptions by the C++ frontend. Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
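
   In effect, the code generated for a throw boils down to the
   following (a sketch of the idea; the actual moves and calls are
   emitted as RTL by expand_internal_throw and emit_throw below):

	__eh_pc = context_of_the_throw;
	__throw ();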
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks (see Walking the stack below) the stack up the dynamic call chain to
121 continue searching for an appropriate exception handler based upon the
122 caller of the function it last sought an exception handler for. It
123 stops when either an exception handler is found, or when the top of
124 the call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
143
144 This user-defined handler will be invoked for *every* exception
145 thrown within the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception,
147 as there is currently no mechanism in this implementation for doing
148 this. (There are plans for conditionally processing an exception
149 based on its "type", which will provide a language-independent
150 mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
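
   Putting this together, a frontend expanding

	try { body } catch (...) { handler }

   makes calls roughly along these lines (a sketch; the exact
   sequence is up to the frontend):

	expand_start_try_stmts ();
	  ... expand the body of the try block ...
	expand_start_all_catch ();
	  ... expand the handler(s) ...
	expand_end_all_catch ();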
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
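
   That is, a cleanup region is opened and closed roughly like this
   (a sketch):

	expand_eh_region_start ();
	  ... expand the code to be protected ...
	expand_eh_region_end (cleanup);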
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown within the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
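
   For example (a sketch):

	add_partial_entry (cleanup_for_a);
	  ... expand code that needs cleanup_for_a ...
	add_partial_entry (cleanup_for_b);
	  ... expand code that needs both cleanups ...
	end_protect_partials ();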
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
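
   Schematically, the RTL for a region whose handler label number is
   N looks like this:

	NOTE_INSN_EH_REGION_BEG   (NOTE_BLOCK_NUMBER = N)
	  ... insns of the protected region ...
	NOTE_INSN_EH_REGION_END   (NOTE_BLOCK_NUMBER = N)
	...
	code label N
	  ... insns of the handler, emitted at the end of the function ...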
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on and proceeding with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250 frame, and finding the pointer to the caller's frame. The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
273 an inline unwinder will have been emitted for any function that
274 __unwind_function cannot unwind. For any such function, the
275 inline unwinder appears as a normal exception handler covering
276 the entire function. We inform the compiler of whether a
277 function can be unwound with __unwind_function by having
278 DOESNT_NEED_UNWINDER evaluate to true when the inline unwinder
279 isn't needed. __unwind_function is used as an action of last
280 resort. If no other method can be used for unwinding,
281 __unwind_function is used. If it cannot unwind, it should call
282 __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
294 machine instructions or as static data in tabular form. The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, much of the above
371 still applies, but there are some additional details, which this
372 section discusses.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
380 table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
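
   Roughly, then, __sjthrow behaves like this (a sketch of the idea
   only; the real code is in libgcc2.c):

	run the actions on the dynamic cleanup chain;
	take the innermost element of the dynamic handler chain;
	longjmp to the setjmp buffer saved in that element;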
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
387 calls terminate when an unhandled exception is thrown. C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include <stdio.h>
394 #include "rtl.h"
395 #include "tree.h"
396 #include "flags.h"
397 #include "except.h"
398 #include "function.h"
399 #include "insn-flags.h"
400 #include "expr.h"
401 #include "insn-codes.h"
402 #include "regs.h"
403 #include "hard-reg-set.h"
404 #include "insn-config.h"
405 #include "recog.h"
406 #include "output.h"
407
408 /* One to use setjmp/longjmp method of generating code for exception
409 handling. */
410
411 int exceptions_via_longjmp = 2;
412
413 /* One to enable asynchronous exception support. */
414
415 int asynchronous_exceptions = 0;
416
417 /* One to protect cleanup actions with a handler that calls
418 __terminate, zero otherwise. */
419
420 int protect_cleanup_actions_with_terminate = 0;
421
422 /* A list of labels used for exception handlers. Created by
423 find_exception_handler_labels for the optimization passes. */
424
425 rtx exception_handler_labels;
426
427 /* Nonzero means that __throw was invoked.
428
429 This is used by the C++ frontend to know if code needs to be emitted
430 for __throw or not. */
431
432 int throw_used;
433
434 /* The dynamic handler chain. Nonzero if the function has already
435 fetched a pointer to the dynamic handler chain for exception
436 handling. */
437
438 rtx current_function_dhc;
439
440 /* The dynamic cleanup chain. Nonzero if the function has already
441 fetched a pointer to the dynamic cleanup chain for exception
442 handling. */
443
444 rtx current_function_dcc;
445
446 /* A stack used for keeping track of the currently active exception
447 handling region. As each exception region is started, an entry
448 describing the region is pushed onto this stack. The current
449 region can be found by looking at the top of the stack, and as we
450 exit regions, the corresponding entries are popped.
451
452 Entries cannot overlap; they can be nested. So there is only one
453 entry at most that corresponds to the current instruction, and that
454 is the entry on the top of the stack. */
455
456 static struct eh_stack ehstack;
457
458 /* A queue used for tracking which exception regions have closed but
459 whose handlers have not yet been expanded. Regions are emitted in
460 groups in an attempt to improve paging performance.
461
462 As we exit a region, we enqueue a new entry. The entries are then
463 dequeued during expand_leftover_cleanups and expand_start_all_catch.
464
465 We should redo things so that we either take RTL for the handler,
466 or we expand the handler expressed as a tree immediately at region
467 end time. */
468
469 static struct eh_queue ehqueue;
470
471 /* Insns for all of the exception handlers for the current function.
472 They are currently emitted by the frontend code. */
473
474 rtx catch_clauses;
475
476 /* A TREE_CHAINed list of handlers for regions that are not yet
477 closed. The TREE_VALUE of each entry contains the handler for the
478 corresponding entry on the ehstack. */
479
480 static tree protect_list;
481
482 /* Stacks to keep track of various labels. */
483
484 /* Keeps track of the label to resume to should one want to resume
485 normal control flow out of a handler (instead of, say, returning to
486 the caller of the current function or exiting the program). Also
487 used as the context of a throw to rethrow an exception to the outer
488 exception region. */
489
490 struct label_node *caught_return_label_stack = NULL;
491
492 /* A random data area for the front end's own use. */
493
494 struct label_node *false_label_stack = NULL;
495
496 /* The rtx and the tree for the saved PC value. */
497
498 rtx eh_saved_pc_rtx;
499 tree eh_saved_pc;
500
501 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
502 \f
503 /* Various support routines to manipulate the various data structures
504 used by the exception handling code. */
505
506 /* Push a label entry onto the given STACK. */
507
508 void
509 push_label_entry (stack, rlabel, tlabel)
510 struct label_node **stack;
511 rtx rlabel;
512 tree tlabel;
513 {
514 struct label_node *newnode
515 = (struct label_node *) xmalloc (sizeof (struct label_node));
516
517 if (rlabel)
518 newnode->u.rlabel = rlabel;
519 else
520 newnode->u.tlabel = tlabel;
521 newnode->chain = *stack;
522 *stack = newnode;
523 }
524
525 /* Pop a label entry from the given STACK. */
526
527 rtx
528 pop_label_entry (stack)
529 struct label_node **stack;
530 {
531 rtx label;
532 struct label_node *tempnode;
533
534 if (! *stack)
535 return NULL_RTX;
536
537 tempnode = *stack;
538 label = tempnode->u.rlabel;
539 *stack = (*stack)->chain;
540 free (tempnode);
541
542 return label;
543 }
544
545 /* Return the top element of the given STACK. */
546
547 tree
548 top_label_entry (stack)
549 struct label_node **stack;
550 {
551 if (! *stack)
552 return NULL_TREE;
553
554 return (*stack)->u.tlabel;
555 }
556
557 /* Make a copy of ENTRY using xmalloc to allocate the space. */
558
559 static struct eh_entry *
560 copy_eh_entry (entry)
561 struct eh_entry *entry;
562 {
563 struct eh_entry *newentry;
564
565 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
566 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
567
568 return newentry;
569 }
570
571 /* Push a new eh_node entry onto STACK. */
572
573 static void
574 push_eh_entry (stack)
575 struct eh_stack *stack;
576 {
577 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
578 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
579
580 entry->outer_context = gen_label_rtx ();
581 entry->exception_handler_label = gen_label_rtx ();
582 entry->finalization = NULL_TREE;
583
584 node->entry = entry;
585 node->chain = stack->top;
586 stack->top = node;
587 }
588
589 /* Pop an entry from the given STACK. */
590
591 static struct eh_entry *
592 pop_eh_entry (stack)
593 struct eh_stack *stack;
594 {
595 struct eh_node *tempnode;
596 struct eh_entry *tempentry;
597
598 tempnode = stack->top;
599 tempentry = tempnode->entry;
600 stack->top = stack->top->chain;
601 free (tempnode);
602
603 return tempentry;
604 }
605
606 /* Enqueue an ENTRY onto the given QUEUE. */
607
608 static void
609 enqueue_eh_entry (queue, entry)
610 struct eh_queue *queue;
611 struct eh_entry *entry;
612 {
613 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
614
615 node->entry = entry;
616 node->chain = NULL;
617
618 if (queue->head == NULL)
619 {
620 queue->head = node;
621 }
622 else
623 {
624 queue->tail->chain = node;
625 }
626 queue->tail = node;
627 }
628
629 /* Dequeue an entry from the given QUEUE. */
630
631 static struct eh_entry *
632 dequeue_eh_entry (queue)
633 struct eh_queue *queue;
634 {
635 struct eh_node *tempnode;
636 struct eh_entry *tempentry;
637
638 if (queue->head == NULL)
639 return NULL;
640
641 tempnode = queue->head;
642 queue->head = queue->head->chain;
643
644 tempentry = tempnode->entry;
645 free (tempnode);
646
647 return tempentry;
648 }
649 \f
650 /* Routine to see if exception handling is turned on.
651 DO_WARN is non-zero if we want to inform the user that exception
652 handling is turned off.
653
654 This is used to ensure that -fexceptions has been specified if the
655 compiler tries to use any exception-specific functions. */
656
657 int
658 doing_eh (do_warn)
659 int do_warn;
660 {
661 if (! flag_exceptions)
662 {
663 static int warned = 0;
664 if (! warned && do_warn)
665 {
666 error ("exception handling disabled, use -fexceptions to enable");
667 warned = 1;
668 }
669 return 0;
670 }
671 return 1;
672 }
673
674 /* Given a return address in ADDR, determine the address we should use
675 to find the corresponding EH region. */
676
677 rtx
678 eh_outer_context (addr)
679 rtx addr;
680 {
681 /* First mask out any unwanted bits. */
682 #ifdef MASK_RETURN_ADDR
683 expand_and (addr, MASK_RETURN_ADDR, addr);
684 #endif
685
686 /* Then adjust to find the real return address. */
687 #if defined (RETURN_ADDR_OFFSET)
688 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
689 #endif
690
691 return addr;
692 }
693
694 /* Start a new exception region for a region of code that has a
695 cleanup action and push the HANDLER for the region onto
696 protect_list. All of the regions created with add_partial_entry
697 will be ended when end_protect_partials is invoked. */
698
699 void
700 add_partial_entry (handler)
701 tree handler;
702 {
703 expand_eh_region_start ();
704
705 /* Make sure the entry is on the correct obstack. */
706 push_obstacks_nochange ();
707 resume_temporary_allocation ();
708
709 /* Because this is a cleanup action, we may have to protect the handler
710 with __terminate. */
711 handler = protect_with_terminate (handler);
712
713 protect_list = tree_cons (NULL_TREE, handler, protect_list);
714 pop_obstacks ();
715 }
716
717 /* Get a reference to the dynamic handler chain. It points to the
718 pointer to the next element in the dynamic handler chain. It ends
719 when there are no more elements in the dynamic handler chain, i.e.
720 when the value is &top_elt from libgcc2.c. Immediately after the
721 pointer is an area suitable for setjmp/longjmp when
722 DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
723 __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
724 isn't defined.
725
726 This routine is here to facilitate the porting of this code to
727 systems with threads. One can either replace the routine we emit a
728 call for here in libgcc2.c, or one can modify this routine to work
729 with their thread system. */
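
/* A sketch of what one element on the dynamic handler chain looks
   like, as laid out by start_dynamic_handler below.  The real object
   is just a block of Pmode-sized words; the struct and field names
   here are purely illustrative:

	struct dynamic_handler_element
	{
	  void *next;		   word 0: previous chain element
	  void *cleanup_chain;	   word 1: head of the dynamic cleanup chain
	  void *jmpbuf[N];	   words 2 and up: setjmp/longjmp buffer
	};  */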
730
731 rtx
732 get_dynamic_handler_chain ()
733 {
734 #if 0
735 /* Do this once we figure out how to get this to the front of the
736 function, and we really only want one per real function, not one
737 per inlined function. */
738 if (current_function_dhc == 0)
739 {
740 rtx dhc, insns;
741 start_sequence ();
742
743 dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
744 NULL_RTX, 1,
745 Pmode, 0);
746 current_function_dhc = copy_to_reg (dhc);
747 insns = get_insns ();
748 end_sequence ();
749 emit_insns_before (insns, get_first_nonparm_insn ());
750 }
751 #else
752 rtx dhc;
753 dhc = emit_library_call_value (get_dynamic_handler_chain_libfunc,
754 NULL_RTX, 1,
755 Pmode, 0);
756 current_function_dhc = copy_to_reg (dhc);
757 #endif
758
759 /* We don't want a copy of the dhc, but rather, the single dhc. */
760 return gen_rtx (MEM, Pmode, current_function_dhc);
761 }
762
763 /* Get a reference to the dynamic cleanup chain. It points to the
764 pointer to the next element in the dynamic cleanup chain.
765 Immediately after the pointer are two Pmode variables, one for a
766 pointer to a function that performs the cleanup action, and the
767 second, the argument to pass to that function. */
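
/* Likewise, a sketch of one element on the dynamic cleanup chain, as
   laid out by start_dynamic_cleanup below (illustrative names only):

	struct dynamic_cleanup_element
	{
	  void *next;	   word 0: previous chain element
	  void *func;	   word 1: function performing the cleanup
	  void *arg;	   word 2: argument passed to that function
	};  */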
768
769 rtx
770 get_dynamic_cleanup_chain ()
771 {
772 rtx dhc, dcc;
773
774 dhc = get_dynamic_handler_chain ();
775 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
776
777 current_function_dcc = copy_to_reg (dcc);
778
779 /* We don't want a copy of the dcc, but rather, the single dcc. */
780 return gen_rtx (MEM, Pmode, current_function_dcc);
781 }
782
783 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
784 LABEL is an rtx of code CODE_LABEL, in this function. */
785
786 void
787 jumpif_rtx (x, label)
788 rtx x;
789 rtx label;
790 {
791 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
792 }
793
794 /* Generate code to evaluate X and jump to LABEL if the value is zero.
795 LABEL is an rtx of code CODE_LABEL, in this function. */
796
797 void
798 jumpifnot_rtx (x, label)
799 rtx x;
800 rtx label;
801 {
802 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
803 }
804
805 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
806 We just need to create an element for the cleanup list, and push it
807 into the chain.
808
809 A dynamic cleanup is a cleanup action implied by the presence of an
810 element on the EH runtime dynamic cleanup stack that is to be
811 performed when an exception is thrown. The cleanup action is
812 performed by __sjthrow when an exception is thrown. Only certain
813 actions can be optimized into dynamic cleanup actions. For the
814 restrictions on what actions can be performed using this routine,
815 see expand_eh_region_start_tree. */
816
817 static void
818 start_dynamic_cleanup (func, arg)
819 tree func;
820 tree arg;
821 {
822 rtx dhc, dcc;
823 rtx new_func, new_arg;
824 rtx x, buf;
825 int size;
826
827 /* We allocate enough room for a pointer to the function, and
828 one argument. */
829 size = 2;
830
831 /* XXX, FIXME: The stack space allocated this way is too long lived,
832 but there is no allocation routine that allocates at the level of
833 the last binding contour. */
834 buf = assign_stack_local (BLKmode,
835 GET_MODE_SIZE (Pmode)*(size+1),
836 0);
837
838 buf = change_address (buf, Pmode, NULL_RTX);
839
840 /* Store dcc into the first word of the newly allocated buffer. */
841
842 dcc = get_dynamic_cleanup_chain ();
843 emit_move_insn (buf, dcc);
844
845 /* Store func and arg into the cleanup list element. */
846
847 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
848 GET_MODE_SIZE (Pmode)));
849 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
850 GET_MODE_SIZE (Pmode)*2));
851 x = expand_expr (func, new_func, Pmode, 0);
852 if (x != new_func)
853 emit_move_insn (new_func, x);
854
855 x = expand_expr (arg, new_arg, Pmode, 0);
856 if (x != new_arg)
857 emit_move_insn (new_arg, x);
858
859 /* Update the cleanup chain. */
860
861 emit_move_insn (dcc, XEXP (buf, 0));
862 }
863
864 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
865 handler stack. This should only be used by expand_eh_region_start
866 or expand_eh_region_start_tree. */
867
868 static void
869 start_dynamic_handler ()
870 {
871 rtx dhc, dcc;
872 rtx x, arg, buf;
873 int size;
874
875 #ifndef DONT_USE_BUILTIN_SETJMP
876 /* The number of Pmode words for the setjmp buffer, when using the
877 builtin setjmp/longjmp, see expand_builtin, case
878 BUILT_IN_LONGJMP. */
879 size = 5;
880 #else
881 #ifdef JMP_BUF_SIZE
882 size = JMP_BUF_SIZE;
883 #else
884 /* Should be large enough for most systems; if it is not,
885 JMP_BUF_SIZE should be defined with the proper value. It will
886 also tend to be larger than necessary for most systems; a more
887 optimal port will define JMP_BUF_SIZE. */
888 size = FIRST_PSEUDO_REGISTER+2;
889 #endif
890 #endif
891 /* XXX, FIXME: The stack space allocated this way is too long lived,
892 but there is no allocation routine that allocates at the level of
893 the last binding contour. */
894 arg = assign_stack_local (BLKmode,
895 GET_MODE_SIZE (Pmode)*(size+1),
896 0);
897
898 arg = change_address (arg, Pmode, NULL_RTX);
899
900 /* Store dhc into the first word of the newly allocated buffer. */
901
902 dhc = get_dynamic_handler_chain ();
903 dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
904 GET_MODE_SIZE (Pmode)));
905 emit_move_insn (arg, dhc);
906
907 /* Zero out the start of the cleanup chain. */
908 emit_move_insn (dcc, const0_rtx);
909
910 /* The jmpbuf starts two words into the area allocated. */
911 buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
912
913 #ifdef DONT_USE_BUILTIN_SETJMP
914 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
915 buf, Pmode);
916 #else
917 x = expand_builtin_setjmp (buf, NULL_RTX);
918 #endif
919
920 /* If we come back here for a catch, transfer control to the
921 handler. */
922
923 jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
924
925 /* We are committed to this, so update the handler chain. */
926
927 emit_move_insn (dhc, XEXP (arg, 0));
928 }
929
930 /* Start an exception handling region for the given cleanup action.
931 All instructions emitted after this point are considered to be part
932 of the region until expand_eh_region_end is invoked. CLEANUP is
933 the cleanup action to perform. The return value is true if the
934 exception region was optimized away. In that case,
935 expand_eh_region_end does not need to be called for this cleanup,
936 nor should it be.
937
938 This routine notices one particular common case in C++ code
939 generation, and optimizes it so as to not need the exception
940 region. It works by creating a dynamic cleanup action, instead of
941 using an exception region. */
942
943 int
944 expand_eh_region_start_tree (decl, cleanup)
945 tree decl;
946 tree cleanup;
947 {
948 rtx note;
949
950 /* This is the old code. */
951 if (! doing_eh (0))
952 return 0;
953
954 /* The optimization only applies to actions protected with
955 terminate, and only applies if we are using the setjmp/longjmp
956 codegen method. */
957 if (exceptions_via_longjmp
958 && protect_cleanup_actions_with_terminate)
959 {
960 tree func, arg;
961 tree args;
962
963 /* Ignore any UNSAVE_EXPR. */
964 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
965 cleanup = TREE_OPERAND (cleanup, 0);
966
967 /* Further, it only applies if the action is a call, if there
968 are 2 arguments, and if the second argument is 2. */
969
970 if (TREE_CODE (cleanup) == CALL_EXPR
971 && (args = TREE_OPERAND (cleanup, 1))
972 && (func = TREE_OPERAND (cleanup, 0))
973 && (arg = TREE_VALUE (args))
974 && (args = TREE_CHAIN (args))
975
976 /* is the second argument 2? */
977 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
978 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
979 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
980
981 /* Make sure there are no other arguments. */
982 && TREE_CHAIN (args) == NULL_TREE)
983 {
984 /* Arrange for returns and gotos to pop the entry we make on the
985 dynamic cleanup stack. */
986 expand_dcc_cleanup (decl);
987 start_dynamic_cleanup (func, arg);
988 return 1;
989 }
990 }
991
992 expand_eh_region_start_for_decl (decl);
993
994 return 0;
995 }
996
997 /* Just like expand_eh_region_start, except if a cleanup action is
998 entered on the cleanup chain, the TREE_PURPOSE of the element put
999 on the chain is DECL. DECL should be the associated VAR_DECL, if
1000 any, otherwise it should be NULL_TREE. */
1001
1002 void
1003 expand_eh_region_start_for_decl (decl)
1004 tree decl;
1005 {
1006 rtx note;
1007
1008 /* This is the old code. */
1009 if (! doing_eh (0))
1010 return;
1011
1012 if (exceptions_via_longjmp)
1013 {
1014 /* We need a new block to record the start and end of the
1015 dynamic handler chain. We could always do this, but we
1016 really want to permit jumping into such a block, and we want
1017 to avoid any errors or performance impact in the SJ EH code
1018 for now. */
1019 expand_start_bindings (0);
1020
1021 /* But we don't need or want a new temporary level. */
1022 pop_temp_slots ();
1023
1024 /* Mark this block as created by expand_eh_region_start. This
1025 is so that we can pop the block with expand_end_bindings
1026 automatically. */
1027 mark_block_as_eh_region ();
1028
1029 /* Arrange for returns and gotos to pop the entry we make on the
1030 dynamic handler stack. */
1031 expand_dhc_cleanup (decl);
1032 }
1033
1034 push_eh_entry (&ehstack);
1035 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1036 NOTE_BLOCK_NUMBER (note)
1037 = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1038 if (exceptions_via_longjmp)
1039 start_dynamic_handler ();
1040 }
1041
1042 /* Start an exception handling region. All instructions emitted after
1043 this point are considered to be part of the region until
1044 expand_eh_region_end is invoked. */
1045
1046 void
1047 expand_eh_region_start ()
1048 {
1049 expand_eh_region_start_for_decl (NULL_TREE);
1050 }
1051
1052 /* End an exception handling region. The information about the region
1053 is found on the top of ehstack.
1054
1055 HANDLER is either the cleanup for the exception region, or if we're
1056 marking the end of a try block, HANDLER is integer_zero_node.
1057
1058 HANDLER will be transformed to rtl when expand_leftover_cleanups
1059 is invoked. */
1060
1061 void
1062 expand_eh_region_end (handler)
1063 tree handler;
1064 {
1065 struct eh_entry *entry;
1066 rtx note;
1067
1068 if (! doing_eh (0))
1069 return;
1070
1071 entry = pop_eh_entry (&ehstack);
1072
1073 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1074 NOTE_BLOCK_NUMBER (note)
1075 = CODE_LABEL_NUMBER (entry->exception_handler_label);
1076 if (exceptions_via_longjmp == 0)
1077 {
1078 rtx label;
1079
1080 label = gen_label_rtx ();
1081 emit_jump (label);
1082
1083 /* Emit a label marking the end of this exception region that
1084 is used for rethrowing into the outer context. */
1085 emit_label (entry->outer_context);
1086
1087 /* Put in something that takes up space, as otherwise the end
1088 address for this EH region could have the exact same address as
1089 its outer region. This would cause us to miss the fact that
1090 resuming exception handling with this PC value would be inside
1091 the outer region. */
1092 emit_insn (gen_nop ());
1093 emit_barrier ();
1094 emit_label (label);
1095 }
1096
1097 entry->finalization = handler;
1098
1099 enqueue_eh_entry (&ehqueue, entry);
1100
1101 /* If we have already started ending the bindings, don't recurse.
1102 This only happens when exceptions_via_longjmp is true. */
1103 if (is_eh_region ())
1104 {
1105 /* Because we don't need or want a new temporary level and
1106 because we didn't create one in expand_eh_region_start,
1107 create a fake one now to avoid removing one in
1108 expand_end_bindings. */
1109 push_temp_slots ();
1110
1111 mark_block_as_not_eh_region ();
1112
1113 /* Maybe do this to prevent jumping in and so on... */
1114 expand_end_bindings (NULL_TREE, 0, 0);
1115 }
1116 }
1117
1118 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1119 call to __sjthrow.
1120
1121 Otherwise, we emit a call to __throw and note that we threw
1122 something, so we know we need to generate the necessary code for
1123 __throw.
1124
1125 Before invoking __throw, the __eh_pc variable must have been set up
1126 to contain the PC being thrown from. This address is used by
1127 __throw to determine which exception region (if any) is
1128 responsible for handling the exception. */
1129
1130 void
1131 emit_throw ()
1132 {
1133 if (exceptions_via_longjmp)
1134 {
1135 emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1136 }
1137 else
1138 {
1139 #ifdef JUMP_TO_THROW
1140 emit_indirect_jump (throw_libfunc);
1141 #else
1142 #ifndef DWARF2_UNWIND_INFO
1143 /* Prevent assemble_external from doing anything with this symbol. */
1144 SYMBOL_REF_USED (throw_libfunc) = 1;
1145 #endif
1146 emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1147 #endif
1148 throw_used = 1;
1149 }
1150 emit_barrier ();
1151 }
1152
1153 /* An internal throw with an indirect CONTEXT we want to throw from.
1154 CONTEXT evaluates to the context of the throw. */
1155
1156 static void
1157 expand_internal_throw_indirect (context)
1158 rtx context;
1159 {
1160 assemble_external (eh_saved_pc);
1161 emit_move_insn (eh_saved_pc_rtx, context);
1162 emit_throw ();
1163 }
1164
1165 /* An internal throw with a direct CONTEXT we want to throw from.
1166 CONTEXT must be a label; its address will be used as the context of
1167 the throw. */
1168
1169 void
1170 expand_internal_throw (context)
1171 rtx context;
1172 {
1173 expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
1174 }
1175
1176 /* Called from expand_exception_blocks and expand_end_catch_block to
1177 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1178
1179 void
1180 expand_leftover_cleanups ()
1181 {
1182 struct eh_entry *entry;
1183
1184 while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
1185 {
1186 rtx prev;
1187
1188 /* A leftover try block. Shouldn't be one here. */
1189 if (entry->finalization == integer_zero_node)
1190 abort ();
1191
1192 /* Output the label for the start of the exception handler. */
1193 emit_label (entry->exception_handler_label);
1194
1195 #ifdef HAVE_exception_receiver
1196 if (! exceptions_via_longjmp)
1197 if (HAVE_exception_receiver)
1198 emit_insn (gen_exception_receiver ());
1199 #endif
1200
1201 #ifdef HAVE_nonlocal_goto_receiver
1202 if (! exceptions_via_longjmp)
1203 if (HAVE_nonlocal_goto_receiver)
1204 emit_insn (gen_nonlocal_goto_receiver ());
1205 #endif
1206
1207 /* And now generate the insns for the handler. */
1208 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1209
1210 prev = get_last_insn ();
1211 if (prev == NULL || GET_CODE (prev) != BARRIER)
1212 {
1213 if (exceptions_via_longjmp)
1214 emit_throw ();
1215 else
1216 {
1217 /* The below can be optimized away, and we could just
1218 fall into the next EH handler, if we are certain they
1219 are nested. */
1220 /* Emit code to throw to the outer context if we fall off
1221 the end of the handler. */
1222 expand_internal_throw (entry->outer_context);
1223 }
1224 }
1225
1226 do_pending_stack_adjust ();
1227 free (entry);
1228 }
1229 }
1230
1231 /* Called at the start of a block of try statements. */
1232 void
1233 expand_start_try_stmts ()
1234 {
1235 if (! doing_eh (1))
1236 return;
1237
1238 expand_eh_region_start ();
1239 }
1240
1241 /* Generate RTL for the start of a group of catch clauses.
1242
1243 It is responsible for starting a new instruction sequence for the
1244 instructions in the catch block, and expanding the handlers for the
1245 internally-generated exception regions nested within the try block
1246 corresponding to this catch block. */
1247
1248 void
1249 expand_start_all_catch ()
1250 {
1251 struct eh_entry *entry;
1252 tree label;
1253
1254 if (! doing_eh (1))
1255 return;
1256
1257 /* End the try block. */
1258 expand_eh_region_end (integer_zero_node);
1259
1260 emit_line_note (input_filename, lineno);
1261 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1262
1263 /* The label for the exception handling block that we will save.
1264 This is Lresume in the documentation. */
1265 expand_label (label);
1266
1267 if (exceptions_via_longjmp == 0)
1268 {
1269 /* Put in something that takes up space, as otherwise the end
1270 address for the EH region could have the exact same address as
1271 the outer region, causing us to miss the fact that resuming
1272 exception handling with this PC value would be inside the outer
1273 region. */
1274 emit_insn (gen_nop ());
1275 }
1276
1277 /* Push the label that points to where normal flow is resumed onto
1278 the top of the label stack. */
1279 push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1280
1281 /* Start a new sequence for all the catch blocks. We will add this
1282 to the global sequence catch_clauses when we have completed all
1283 the handlers in this handler-seq. */
1284 start_sequence ();
1285
1286 while (1)
1287 {
1288 rtx prev;
1289
1290 entry = dequeue_eh_entry (&ehqueue);
1291 /* Emit the label for the exception handler for this region, and
1292 expand the code for the handler.
1293
1294 Note that a catch region is handled as a side-effect here;
1295 for a try block, entry->finalization will contain
1296 integer_zero_node, so no code will be generated in the
1297 expand_expr call below. But, the label for the handler will
1298 still be emitted, so any code emitted after this point will
1299 end up being the handler. */
1300 emit_label (entry->exception_handler_label);
1301
1302 #ifdef HAVE_exception_receiver
1303 if (! exceptions_via_longjmp)
1304 if (HAVE_exception_receiver)
1305 emit_insn (gen_exception_receiver ());
1306 #endif
1307
1308 #ifdef HAVE_nonlocal_goto_receiver
1309 if (! exceptions_via_longjmp)
1310 if (HAVE_nonlocal_goto_receiver)
1311 emit_insn (gen_nonlocal_goto_receiver ());
1312 #endif
1313
1314 /* When we get down to the matching entry for this try block, stop. */
1315 if (entry->finalization == integer_zero_node)
1316 {
1317 /* Don't forget to free this entry. */
1318 free (entry);
1319 break;
1320 }
1321
1322 /* And now generate the insns for the handler. */
1323 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1324
1325 prev = get_last_insn ();
1326 if (prev == NULL || GET_CODE (prev) != BARRIER)
1327 {
1328 if (exceptions_via_longjmp)
1329 emit_throw ();
1330 else
1331 {
1332 /* Code to throw out to outer context when we fall off end
1333 of the handler. We can't do this here for catch blocks,
1334 so it's done in expand_end_all_catch instead.
1335
1336 The below can be optimized away (and we could just fall
1337 into the next EH handler) if we are certain they are
1338 nested. */
1339
1340 expand_internal_throw (entry->outer_context);
1341 }
1342 }
1343 free (entry);
1344 }
1345 }
1346
1347 /* Finish up the catch block. At this point all the insns for the
1348 catch clauses have already been generated, so we only have to add
1349 them to the catch_clauses list. We also want to make sure that if
1350 we fall off the end of the catch clauses that we rethrow to the
1351 outer EH region. */
1352
1353 void
1354 expand_end_all_catch ()
1355 {
1356 rtx new_catch_clause;
1357
1358 if (! doing_eh (1))
1359 return;
1360
1361 if (exceptions_via_longjmp)
1362 emit_throw ();
1363 else
1364 {
1365 /* Code to throw out to outer context, if we fall off end of catch
1366 handlers. This is rethrow (Lresume, same id, same obj) in the
1367 documentation. We use Lresume because we know that it will throw
1368 to the correct context.
1369
1370 In other words, if the catch handler doesn't exit or return, we
1371 do a "throw" (using the address of Lresume as the point being
1372 thrown from) so that the outer EH region can then try to process
1373 the exception. */
1374
1375 expand_internal_throw (DECL_RTL (top_label_entry (&caught_return_label_stack)));
1376 }
1377
1378 /* Now we have the complete catch sequence. */
1379 new_catch_clause = get_insns ();
1380 end_sequence ();
1381
1382 /* This level of catch blocks is done, so set up the successful
1383 catch jump label for the next layer of catch blocks. */
1384 pop_label_entry (&caught_return_label_stack);
1385
1386 /* Add the new sequence of catches to the main one for this function. */
1387 push_to_sequence (catch_clauses);
1388 emit_insns (new_catch_clause);
1389 catch_clauses = get_insns ();
1390 end_sequence ();
1391
1392 /* Here we fall through into the continuation code. */
1393 }
1394
1395 /* End all the pending exception regions on protect_list. The handlers
1396 will be emitted when expand_leftover_cleanups is invoked. */
1397
1398 void
1399 end_protect_partials ()
1400 {
1401 while (protect_list)
1402 {
1403 expand_eh_region_end (TREE_VALUE (protect_list));
1404 protect_list = TREE_CHAIN (protect_list);
1405 }
1406 }
1407
1408 /* Arrange for __terminate to be called if there is an unhandled throw
1409 from within E. */
1410
1411 tree
1412 protect_with_terminate (e)
1413 tree e;
1414 {
1415 /* We only need to do this when using setjmp/longjmp EH and the
1416 language requires it, as otherwise we protect all of the handlers
1417 at once, if we need to. */
1418 if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
1419 {
1420 tree handler, result;
1421
1422 /* All cleanups must be on the function_obstack. */
1423 push_obstacks_nochange ();
1424 resume_temporary_allocation ();
1425
1426 handler = make_node (RTL_EXPR);
1427 TREE_TYPE (handler) = void_type_node;
1428 RTL_EXPR_RTL (handler) = const0_rtx;
1429 TREE_SIDE_EFFECTS (handler) = 1;
1430 start_sequence_for_rtl_expr (handler);
1431
1432 emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
1433 emit_barrier ();
1434
1435 RTL_EXPR_SEQUENCE (handler) = get_insns ();
1436 end_sequence ();
1437
1438 result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
1439 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
1440 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
1441 TREE_READONLY (result) = TREE_READONLY (e);
1442
1443 pop_obstacks ();
1444
1445 e = result;
1446 }
1447
1448 return e;
1449 }
1450 \f
1451 /* The exception table that we build that is used for looking up and
1452 dispatching exceptions, the current number of entries, and its
1453 maximum size before we have to extend it.
1454
1455 The number in eh_table is the code label number of the exception
1456 handler for the region. This is added by add_eh_table_entry and
1457 used by output_exception_table_entry. */
1458
1459 static int *eh_table;
1460 static int eh_table_size;
1461 static int eh_table_max_size;
1462
1463 /* Note the need for an exception table entry for region N. If we
1464 don't need to output an explicit exception table, avoid all of the
1465 extra work.
1466
1467 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1468 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1469 label number of the exception handler for the region. */
1470
1471 void
1472 add_eh_table_entry (n)
1473 int n;
1474 {
1475 #ifndef OMIT_EH_TABLE
1476 if (eh_table_size >= eh_table_max_size)
1477 {
1478 if (eh_table)
1479 {
1480 eh_table_max_size += eh_table_max_size>>1;
1481
1482 if (eh_table_max_size < 0)
1483 abort ();
1484
1485 eh_table = (int *) xrealloc (eh_table,
1486 eh_table_max_size * sizeof (int));
1487 }
1488 else
1489 {
1490 eh_table_max_size = 252;
1491 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1492 }
1493 }
1494 eh_table[eh_table_size++] = n;
1495 #endif
1496 }
1497
1498 /* Return a non-zero value if we need to output an exception table.
1499
1500 On some platforms, we don't have to output the table explicitly,
1501 but that doesn't mean there isn't one. */
1502
1503 int
1504 exception_table_p ()
1505 {
1506 if (eh_table)
1507 return 1;
1508
1509 return 0;
1510 }
1511
1512 /* 1 if we need a static constructor to register EH table info. */
1513
1514 int
1515 register_exception_table_p ()
1516 {
1517 #if defined (DWARF2_UNWIND_INFO)
1518 return 0;
1519 #endif
1520
1521 return exception_table_p ();
1522 }
1523
1524 /* Output the entry of the exception table corresponding to the
1525 exception region numbered N to file FILE.
1526
1527 N is the code label number corresponding to the handler of the
1528 region. */
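
/* For example, on a typical 32-bit ELF target the entry for handler
   label 42 would come out roughly as follows (a sketch; the label
   prefixes and pseudo-ops are target dependent):

	.long	.LEHB42		start of the region
	.long	.LEHE42		end of the region
	.long	.L42		the handler itself
*/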
1529
1530 static void
1531 output_exception_table_entry (file, n)
1532 FILE *file;
1533 int n;
1534 {
1535 char buf[256];
1536 rtx sym;
1537
1538 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1539 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1540 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1541
1542 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1543 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1544 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1545
1546 ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
1547 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1548 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1549
1550 putc ('\n', file); /* blank line */
1551 }
1552
1553 /* Output the exception table if we have and need one. */
1554
1555 void
1556 output_exception_table ()
1557 {
1558 int i;
1559 extern FILE *asm_out_file;
1560
1561 if (! doing_eh (0) || ! eh_table)
1562 return;
1563
1564 exception_section ();
1565
1566 /* Beginning marker for table. */
1567 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1568 assemble_label ("__EXCEPTION_TABLE__");
1569
1570 for (i = 0; i < eh_table_size; ++i)
1571 output_exception_table_entry (asm_out_file, eh_table[i]);
1572
1573 free (eh_table);
1574
1575 /* Ending marker for table. */
1576 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1577 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1578 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1579 putc ('\n', asm_out_file); /* blank line */
1580 }
1581
1582 /* Generate code to initialize the exception table at program startup
1583 time. */
1584
1585 void
1586 register_exception_table ()
1587 {
1588 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1589 VOIDmode, 1,
1590 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1591 Pmode);
1592 }
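/* Illustrative sketch, not part of the compiler: the library call emitted
   above corresponds, at the source level, roughly to the guarded-out code
   below, where __EXCEPTION_TABLE__ is the label placed at the head of the
   table by output_exception_table and __register_exceptions is supplied by
   the runtime support code.  The wrapper function name is hypothetical;
   in reality the call is arranged via a static constructor.  */
#if 0
extern void __register_exceptions ();
extern char __EXCEPTION_TABLE__[];

/* Hypothetical wrapper showing the shape of the emitted call.  */
static void
call_register_exceptions_at_startup ()
{
  __register_exceptions (__EXCEPTION_TABLE__);
}
#endif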
1593 \f
1594 /* Emit the RTL for the start of the per-function unwinder for the
1595 current function. See emit_unwinder for further information.
1596
1597 DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1598 the current function actually needs a per-function unwinder or not.
1599 By default, all functions need one. */
1600
1601 void
1602 start_eh_unwinder ()
1603 {
1604 #ifdef DOESNT_NEED_UNWINDER
1605 if (DOESNT_NEED_UNWINDER)
1606 return;
1607 #endif
1608
1609 /* If we are using the setjmp/longjmp implementation, we don't need a
1610 per function unwinder. */
1611
1612 if (exceptions_via_longjmp)
1613 return;
1614
1615 #ifdef DWARF2_UNWIND_INFO
1616 return;
1617 #endif
1618
1619 expand_eh_region_start ();
1620 }
1621
1622 /* Emit insns for the end of the per-function unwinder for the
1623 current function. */
1624
1625 void
1626 end_eh_unwinder ()
1627 {
1628 tree expr;
1629 rtx return_val_rtx, ret_val, label, end, insns;
1630
1631 if (! doing_eh (0))
1632 return;
1633
1634 #ifdef DOESNT_NEED_UNWINDER
1635 if (DOESNT_NEED_UNWINDER)
1636 return;
1637 #endif
1638
1639 /* If we are using the setjmp/longjmp implementation, we don't need a
1640 per function unwinder. */
1641
1642 if (exceptions_via_longjmp)
1643 return;
1644
1645 #ifdef DWARF2_UNWIND_INFO
1646 return;
1647 #else /* DWARF2_UNWIND_INFO */
1648
1649 assemble_external (eh_saved_pc);
1650
1651 expr = make_node (RTL_EXPR);
1652 TREE_TYPE (expr) = void_type_node;
1653 RTL_EXPR_RTL (expr) = const0_rtx;
1654 TREE_SIDE_EFFECTS (expr) = 1;
1655 start_sequence_for_rtl_expr (expr);
1656
1657 /* ret_val will contain the address of the code where the call
1658 to the current function occurred. */
1659 ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
1660 0, hard_frame_pointer_rtx);
1661 return_val_rtx = copy_to_reg (ret_val);
1662
1663 /* Get the address we need to use to determine what exception
1664 handler should be invoked, and store it in __eh_pc. */
1665 return_val_rtx = eh_outer_context (return_val_rtx);
1666 return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
1667 NULL_RTX, 0, OPTAB_LIB_WIDEN);
1668 emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
1669
1670 /* Either set things up so we do a return directly to __throw, or
1671 we return here instead. */
1672 #ifdef JUMP_TO_THROW
1673 emit_move_insn (ret_val, throw_libfunc);
1674 #else
1675 label = gen_label_rtx ();
1676 emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
1677 #endif
1678
1679 #ifdef RETURN_ADDR_OFFSET
1680 return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
1681 if (return_val_rtx != ret_val)
1682 emit_move_insn (ret_val, return_val_rtx);
1683 #endif
1684
1685 end = gen_label_rtx ();
1686 emit_jump (end);
1687
1688 RTL_EXPR_SEQUENCE (expr) = get_insns ();
1689 end_sequence ();
1690
1691 expand_eh_region_end (expr);
1692
1693 emit_jump (end);
1694
1695 #ifndef JUMP_TO_THROW
1696 emit_label (label);
1697 emit_throw ();
1698 #endif
1699
1700 expand_leftover_cleanups ();
1701
1702 emit_label (end);
1703
1704 #ifdef HAVE_return
1705 if (HAVE_return)
1706 {
1707 emit_jump_insn (gen_return ());
1708 emit_barrier ();
1709 }
1710 #endif
1711 #endif /* DWARF2_UNWIND_INFO */
1712 }
1713
1714 /* If necessary, emit insns for the per function unwinder for the
1715 current function. Called after all the code that needs unwind
1716 protection is output.
1717
1718 The unwinder takes care of catching any exceptions that have not
1719 been previously caught within the function, unwinding the stack to
1720 the next frame, and rethrowing using the address of the current
1721 function's caller as the context of the throw.
1722
1723 On some platforms __throw can do this by itself (or with the help
1724 of __unwind_function) so the per-function unwinder is
1725 unnecessary.
1726
1727 We cannot place the unwinder into the function until after we know
1728 we are done inlining, as we don't want to have more than one
1729 unwinder per non-inlined function. */
1730
1731 void
1732 emit_unwinder ()
1733 {
1734 rtx insns, insn;
1735
1736 start_sequence ();
1737 start_eh_unwinder ();
1738 insns = get_insns ();
1739 end_sequence ();
1740
1741 /* We place the start of the exception region associated with the
1742 per function unwinder at the top of the function. */
1743 if (insns)
1744 emit_insns_after (insns, get_insns ());
1745
1746 start_sequence ();
1747 end_eh_unwinder ();
1748 insns = get_insns ();
1749 end_sequence ();
1750
1751 /* And we place the end of the exception region before the USE and
1752 CLOBBER insns that may come at the end of the function. */
1753 if (insns == 0)
1754 return;
1755
1756 insn = get_last_insn ();
1757 while (GET_CODE (insn) == NOTE
1758 || (GET_CODE (insn) == INSN
1759 && (GET_CODE (PATTERN (insn)) == USE
1760 || GET_CODE (PATTERN (insn)) == CLOBBER)))
1761 insn = PREV_INSN (insn);
1762
1763 if (GET_CODE (insn) == CODE_LABEL
1764 && GET_CODE (PREV_INSN (insn)) == BARRIER)
1765 {
1766 insn = PREV_INSN (insn);
1767 }
1768 else
1769 {
1770 rtx label = gen_label_rtx ();
1771 emit_label_after (label, insn);
1772 insn = emit_jump_insn_after (gen_jump (label), insn);
1773 insn = emit_barrier_after (insn);
1774 }
1775
1776 emit_insns_after (insns, insn);
1777 }
1778
1779 /* Scan the current insns and build a list of handler labels. The
1780 resulting list is placed in the global variable exception_handler_labels.
1781
1782 It is called after the last exception handling region is added to
1783 the current function (when the rtl is almost all built for the
1784 current function) and before the jump optimization pass. */
1785
1786 void
1787 find_exception_handler_labels ()
1788 {
1789 rtx insn;
1790 int max_labelno = max_label_num ();
1791 int min_labelno = get_first_label_num ();
1792 rtx *labels;
1793
1794 exception_handler_labels = NULL_RTX;
1795
1796 /* If we aren't doing exception handling, there isn't much to check. */
1797 if (! doing_eh (0))
1798 return;
1799
1800 /* Generate a handy reference to each label. */
1801
1802 /* We call xmalloc here instead of alloca; we did the latter in the past,
1803 but found that it can sometimes end up being asked to allocate space
1804 for more than 1 million labels. */
1805 labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1806 bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1807
1808 /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
1809 labels -= min_labelno;
1810
1811 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1812 {
1813 if (GET_CODE (insn) == CODE_LABEL)
1814 if (CODE_LABEL_NUMBER (insn) >= min_labelno
1815 && CODE_LABEL_NUMBER (insn) < max_labelno)
1816 labels[CODE_LABEL_NUMBER (insn)] = insn;
1817 }
1818
1819 /* For each start of a region, add its label to the list. */
1820
1821 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1822 {
1823 if (GET_CODE (insn) == NOTE
1824 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1825 {
1826 rtx label = NULL_RTX;
1827
1828 if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1829 && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1830 {
1831 label = labels[NOTE_BLOCK_NUMBER (insn)];
1832
1833 if (label)
1834 exception_handler_labels
1835 = gen_rtx (EXPR_LIST, VOIDmode,
1836 label, exception_handler_labels);
1837 else
1838 warning ("didn't find handler for EH region %d",
1839 NOTE_BLOCK_NUMBER (insn));
1840 }
1841 else
1842 warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1843 }
1844 }
1845
1846 free (labels + min_labelno);
1847 }
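/* Illustrative sketch, not part of the compiler: the pointer-biasing trick
   used above.  After subtracting the smallest valid index from the base
   pointer, the array can be indexed directly with values in
   [min_index, max_index) instead of subtracting min_index at each use; the
   bias must be undone before freeing.  Function and variable names here
   are illustrative only.  Guarded out so it does not affect compilation.  */
#if 0
#include <stdlib.h>
#include <string.h>

static void
biased_index_example (min_index, max_index)
     int min_index, max_index;
{
  int *slot = (int *) malloc ((max_index - min_index) * sizeof (int));
  int i;

  memset ((char *) slot, 0, (max_index - min_index) * sizeof (int));

  /* Bias the pointer so slot[i] is valid for min_index <= i < max_index.  */
  slot -= min_index;

  for (i = min_index; i < max_index; i++)
    slot[i] = i;

  /* Undo the bias before freeing.  */
  free (slot + min_index);
}
#endif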
1848
1849 /* Perform sanity checking on the exception_handler_labels list.
1850
1851 Can be called after find_exception_handler_labels is called to
1852 build the list of exception handlers for the current function and
1853 before we finish processing the current function. */
1854
1855 void
1856 check_exception_handler_labels ()
1857 {
1858 rtx insn, handler;
1859
1860 /* If we aren't doing exception handling, there isn't much to check. */
1861 if (! doing_eh (0))
1862 return;
1863
1864 /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
1865 in each handler corresponds to the CODE_LABEL_NUMBER of the
1866 handler. */
1867
1868 for (handler = exception_handler_labels;
1869 handler;
1870 handler = XEXP (handler, 1))
1871 {
1872 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1873 {
1874 if (GET_CODE (insn) == CODE_LABEL)
1875 {
1876 if (CODE_LABEL_NUMBER (insn)
1877 == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1878 {
1879 if (insn != XEXP (handler, 0))
1880 warning ("mismatched handler %d",
1881 CODE_LABEL_NUMBER (insn));
1882 break;
1883 }
1884 }
1885 }
1886 if (insn == NULL_RTX)
1887 warning ("handler not found %d",
1888 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1889 }
1890
1891 /* Now go through and make sure that for each region there is a
1892 corresponding label. */
1893 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1894 {
1895 if (GET_CODE (insn) == NOTE
1896 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1897 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1898 {
1899 for (handler = exception_handler_labels;
1900 handler;
1901 handler = XEXP (handler, 1))
1902 {
1903 if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1904 == NOTE_BLOCK_NUMBER (insn))
1905 break;
1906 }
1907 if (handler == NULL_RTX)
1908 warning ("region exists, no handler %d",
1909 NOTE_BLOCK_NUMBER (insn));
1910 }
1911 }
1912 }
1913 \f
1914 /* This group of functions initializes the exception handling data
1915 structures at the start of the compilation, initializes the data
1916 structures at the start of a function, and saves and restores the
1917 exception handling data structures for the start/end of a nested
1918 function. */
1919
1920 /* Toplevel initialization for EH things. */
1921
1922 void
1923 init_eh ()
1924 {
1925 /* Generate rtl to reference the variable in which the PC of the
1926 current context is saved. */
1927 tree type = build_pointer_type (make_node (VOID_TYPE));
1928
1929 eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
1930 DECL_EXTERNAL (eh_saved_pc) = 1;
1931 TREE_PUBLIC (eh_saved_pc) = 1;
1932 make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
1933 eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
1934 }
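/* Illustrative note, not part of the compiler: the declaration built above
   corresponds, at the source level, to an external variable supplied by the
   runtime in which the context of a throw is kept, roughly

       extern void *__eh_pc;

   so that compiled code and the runtime agree on where the throw context
   lives.  */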
1935
1936 /* Initialize the per-function EH information. */
1937
1938 void
1939 init_eh_for_function ()
1940 {
1941 ehstack.top = 0;
1942 ehqueue.head = ehqueue.tail = 0;
1943 catch_clauses = NULL_RTX;
1944 false_label_stack = 0;
1945 caught_return_label_stack = 0;
1946 protect_list = NULL_TREE;
1947 current_function_dhc = NULL_RTX;
1948 current_function_dcc = NULL_RTX;
1949 }
1950
1951 /* Save some of the per-function EH info into the save area denoted by
1952 P.
1953
1954 This is currently called from save_stmt_status. */
1955
1956 void
1957 save_eh_status (p)
1958 struct function *p;
1959 {
1960 if (p == NULL)
1961 abort ();
1962
1963 p->ehstack = ehstack;
1964 p->ehqueue = ehqueue;
1965 p->catch_clauses = catch_clauses;
1966 p->false_label_stack = false_label_stack;
1967 p->caught_return_label_stack = caught_return_label_stack;
1968 p->protect_list = protect_list;
1969 p->dhc = current_function_dhc;
1970 p->dcc = current_function_dcc;
1971
1972 init_eh ();
1973 }
1974
1975 /* Restore the per-function EH info saved into the area denoted by P.
1976
1977 This is currently called from restore_stmt_status. */
1978
1979 void
1980 restore_eh_status (p)
1981 struct function *p;
1982 {
1983 if (p == NULL)
1984 abort ();
1985
1986 protect_list = p->protect_list;
1987 caught_return_label_stack = p->caught_return_label_stack;
1988 false_label_stack = p->false_label_stack;
1989 catch_clauses = p->catch_clauses;
1990 ehqueue = p->ehqueue;
1991 ehstack = p->ehstack;
1992 current_function_dhc = p->dhc;
1993 current_function_dcc = p->dcc;
1994 }
1995 \f
1996 /* This section is for the exception handling specific optimization
1997 pass. First are the internal routines, and then the main
1998 optimization pass. */
1999
2000 /* Determine if the given INSN can throw an exception. */
2001
2002 static int
2003 can_throw (insn)
2004 rtx insn;
2005 {
2006 /* Calls can always potentially throw exceptions. */
2007 if (GET_CODE (insn) == CALL_INSN)
2008 return 1;
2009
2010 if (asynchronous_exceptions)
2011 {
2012 /* With asynchronous exceptions enabled, everything but NOTEs
2013 and CODE_LABELs could throw. */
2014 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2015 return 1;
2016 }
2017
2018 return 0;
2019 }
2020
2021 /* Scan an exception region looking for the matching end and then
2022 remove it if possible. INSN is the start of the region, N is the
2023 region number, and DELETE_OUTER is used to note whether anything in
2024 this region can throw.
2025
2026 Regions are removed if they cannot possibly catch an exception.
2027 This is determined by invoking can_throw on each insn within the
2028 region; if can_throw returns true for any of the instructions, the
2029 region can catch an exception, since there is an insn within the
2030 region that is capable of throwing an exception.
2031
2032 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2033 calls abort if it can't find one.
2034
2035 Aborts if INSN is not a NOTE_INSN_EH_REGION_BEG, if N doesn't
2036 correspond to the region number, or if DELETE_OUTER is NULL. */
2037
2038 static rtx
2039 scan_region (insn, n, delete_outer)
2040 rtx insn;
2041 int n;
2042 int *delete_outer;
2043 {
2044 rtx start = insn;
2045
2046 /* Assume we can delete the region. */
2047 int delete = 1;
2048
2049 if (insn == NULL_RTX
2050 || GET_CODE (insn) != NOTE
2051 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
2052 || NOTE_BLOCK_NUMBER (insn) != n
2053 || delete_outer == NULL)
2054 abort ();
2055
2056 insn = NEXT_INSN (insn);
2057
2058 /* Look for the matching end. */
2059 while (! (GET_CODE (insn) == NOTE
2060 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2061 {
2062 /* If anything can throw, we can't remove the region. */
2063 if (delete && can_throw (insn))
2064 {
2065 delete = 0;
2066 }
2067
2068 /* Watch out for and handle nested regions. */
2069 if (GET_CODE (insn) == NOTE
2070 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2071 {
2072 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
2073 }
2074
2075 insn = NEXT_INSN (insn);
2076 }
2077
2078 /* The _BEG/_END NOTEs must match and nest. */
2079 if (NOTE_BLOCK_NUMBER (insn) != n)
2080 abort ();
2081
2082 /* If anything in this exception region can throw, we can throw. */
2083 if (! delete)
2084 *delete_outer = 0;
2085 else
2086 {
2087 /* Delete the start and end of the region. */
2088 delete_insn (start);
2089 delete_insn (insn);
2090
2091 /* Only do this part if we have built the exception handler
2092 labels. */
2093 if (exception_handler_labels)
2094 {
2095 rtx x, *prev = &exception_handler_labels;
2096
2097 /* Find it in the list of handlers. */
2098 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2099 {
2100 rtx label = XEXP (x, 0);
2101 if (CODE_LABEL_NUMBER (label) == n)
2102 {
2103 /* If this is the last reference to the handler,
2104 delete it. */
2105 if (--LABEL_NUSES (label) == 0)
2106 delete_insn (label);
2107
2108 if (optimize)
2109 {
2110 /* Remove it from the list of exception handler
2111 labels, if we are optimizing. If we are not, then
2112 leave it in the list, as we are not really going to
2113 remove the region. */
2114 *prev = XEXP (x, 1);
2115 XEXP (x, 1) = 0;
2116 XEXP (x, 0) = 0;
2117 }
2118
2119 break;
2120 }
2121 prev = &XEXP (x, 1);
2122 }
2123 }
2124 }
2125 return insn;
2126 }
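/* Illustrative sketch, not part of the compiler: the shape of the recursive
   scan above, applied to a flat array of marks instead of the insn chain.
   'B' opens a region, 'E' closes the innermost open region (the marks are
   assumed to be well nested), and 'T' stands for an insn that can throw.
   A region is removable only if neither it nor any nested region contains
   a 'T'; a 'T' anywhere inside also keeps every enclosing region, which is
   what the DELETE_OUTER out-parameter propagates.  The function name and
   encoding are illustrative only.  Guarded out so it does not affect
   compilation.  */
#if 0
/* Scan marks[*pos ...] for the region opened at *pos; advance *pos past its
   matching 'E'.  Clear *can_remove_outer if this region must be kept.  */
static void
scan_marks (marks, pos, can_remove_outer)
     const char *marks;
     int *pos, *can_remove_outer;
{
  int can_remove = 1;

  ++*pos;			/* skip the opening 'B' */
  while (marks[*pos] != 'E')
    {
      if (marks[*pos] == 'T')
	can_remove = 0;

      if (marks[*pos] == 'B')
	scan_marks (marks, pos, &can_remove);
      else
	++*pos;
    }
  ++*pos;			/* skip the closing 'E' */

  if (! can_remove)
    *can_remove_outer = 0;
  /* else: the region delimited here could be deleted.  */
}
#endif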
2127
2128 /* Perform various interesting optimizations for exception handling
2129 code.
2130
2131 We look for empty exception regions and remove them. The jump
2132 optimization code will then remove the handler if nothing else
2133 uses it. */
2134
2135 void
2136 exception_optimize ()
2137 {
2138 rtx insn, regions = NULL_RTX;
2139 int n;
2140
2141 /* Remove empty regions. */
2142 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2143 {
2144 if (GET_CODE (insn) == NOTE
2145 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2146 {
2147 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2148 insn, we will indirectly skip through all the insns in
2149 between. We are also guaranteed that the value of insn
2150 returned will be valid, as otherwise scan_region would have
2151 aborted rather than return. */
2152 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2153 }
2154 }
2155 }
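/* Illustrative example, not part of the compiler: for a region whose body
   contains no CALL_INSN (and, when asynchronous exceptions are not enabled,
   no other insn that can throw), the pass deletes the delimiting notes, e.g.

       NOTE_INSN_EH_REGION_BEG 42		<- deleted
	 ... insns that cannot throw ...
       NOTE_INSN_EH_REGION_END 42		<- deleted

   and handler label 42 is dropped from exception_handler_labels (when
   optimizing), so that jump optimization can remove the handler's code if
   nothing else refers to it.  */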
2156 \f
2157 /* Various hooks for the DWARF 2 __throw routine. */
2158
2159 /* Do any necessary initialization to access arbitrary stack frames.
2160 On the SPARC, this means flushing the register windows. */
2161
2162 void
2163 expand_builtin_unwind_init ()
2164 {
2165 /* Set this so all the registers get saved in our frame; we need to be
2166 able to copy the saved values for any registers from frames we unwind. */
2167 current_function_has_nonlocal_label = 1;
2168
2169 #ifdef SETUP_FRAME_ADDRESSES
2170 SETUP_FRAME_ADDRESSES ();
2171 #endif
2172 }
2173
2174 /* Given a value extracted from the return address register or stack slot,
2175 return the actual address encoded in that value. */
2176
2177 rtx
2178 expand_builtin_extract_return_addr (addr_tree)
2179 tree addr_tree;
2180 {
2181 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2182 return eh_outer_context (addr);
2183 }
2184
2185 /* Given an actual address in addr_tree, do any necessary encoding
2186 and return the value to be stored in the return address register or
2187 stack slot so the epilogue will return to that address. */
2188
2189 rtx
2190 expand_builtin_frob_return_addr (addr_tree)
2191 tree addr_tree;
2192 {
2193 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2194 #ifdef RETURN_ADDR_OFFSET
2195 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2196 #endif
2197 return addr;
2198 }
2199
2200 /* Given an actual address in addr_tree, set the return address register up
2201 so the epilogue will return to that address. If the return address is
2202 not in a register, do nothing. */
2203
2204 void
2205 expand_builtin_set_return_addr_reg (addr_tree)
2206 tree addr_tree;
2207 {
2208 rtx tmp;
2209 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2210 0, hard_frame_pointer_rtx);
2211
2212 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2213 return;
2214
2215 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2216 if (tmp != ra)
2217 emit_move_insn (ra, tmp);
2218 }
2219
2220 /* Choose two registers for communication between the main body of
2221 __throw and the stub for adjusting the stack pointer. The first register
2222 is used to pass the address of the exception handler; the second register
2223 is used to pass the stack pointer offset.
2224
2225 For register 1 we use the return value register for a void *.
2226 For register 2 we use the static chain register if it exists and is
2227 different from register 1, otherwise some arbitrary call-clobbered
2228 register. */
2229
2230 static void
2231 eh_regs (r1, r2, outgoing)
2232 rtx *r1, *r2;
2233 int outgoing;
2234 {
2235 rtx reg1, reg2;
2236
2237 #ifdef FUNCTION_OUTGOING_VALUE
2238 if (outgoing)
2239 reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2240 current_function_decl);
2241 else
2242 #endif
2243 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2244 current_function_decl);
2245
2246 #ifdef STATIC_CHAIN_REGNUM
2247 if (outgoing)
2248 reg2 = static_chain_incoming_rtx;
2249 else
2250 reg2 = static_chain_rtx;
2251 if (REGNO (reg2) == REGNO (reg1))
2252 #endif /* STATIC_CHAIN_REGNUM */
2253 reg2 = NULL_RTX;
2254
2255 if (reg2 == NULL_RTX)
2256 {
2257 int i;
2258 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2259 if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
2260 {
2261 reg2 = gen_rtx (REG, Pmode, i);
2262 break;
2263 }
2264
2265 if (reg2 == NULL_RTX)
2266 abort ();
2267 }
2268
2269 *r1 = reg1;
2270 *r2 = reg2;
2271 }
2272
2273 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2274 to the exception handler. __throw will set up the necessary values
2275 and then return to the stub. */
2276
2277 rtx
2278 expand_builtin_eh_stub ()
2279 {
2280 rtx stub_start = gen_label_rtx ();
2281 rtx after_stub = gen_label_rtx ();
2282 rtx handler, offset, temp;
2283
2284 emit_jump (after_stub);
2285 emit_label (stub_start);
2286
2287 eh_regs (&handler, &offset, 0);
2288
2289 adjust_stack (offset);
2290 emit_indirect_jump (handler);
2291
2292 emit_label (after_stub);
2293 return gen_rtx (LABEL_REF, Pmode, stub_start);
2294 }
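/* Illustrative sketch, not part of the compiler: the hand-off between
   __throw and the stub emitted above.  __throw places the handler address
   and the stack adjustment in the two registers chosen by eh_regs and then
   resumes at stub_start; the stub adds the adjustment to the stack pointer
   and jumps to the handler.  Hard registers are modeled with ordinary
   variables below, and the stack adjustment itself has no portable C
   equivalent, so it is only noted in a comment.  All names here are
   illustrative.  Guarded out so it does not affect compilation.  */
#if 0
static void *eh_stub_handler;		/* stands in for the first eh register */
static long eh_stub_stack_adjust;	/* stands in for the second eh register */

static void
eh_stub_sketch ()
{
  void (*handler) ();

  /* adjust_stack (eh_stub_stack_adjust): move the stack pointer so the
     handler runs in the frame that established its region.  */

  handler = (void (*) ()) eh_stub_handler;
  (*handler) ();			/* emit_indirect_jump (handler) */
}
#endif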
2295
2296 /* Set up the registers for passing the handler address and stack offset
2297 to the stub above. */
2298
2299 void
2300 expand_builtin_set_eh_regs (handler, offset)
2301 tree handler, offset;
2302 {
2303 rtx reg1, reg2;
2304
2305 eh_regs (&reg1, &reg2, 1);
2306
2307 store_expr (offset, reg2, 0);
2308 store_expr (handler, reg1, 0);
2309
2310 /* These will be used by the stub. */
2311 emit_insn (gen_rtx (USE, VOIDmode, reg1));
2312 emit_insn (gen_rtx (USE, VOIDmode, reg2));
2313 }