/* Handle exceptions for GNU compiler for the Java(TM) language.
   Copyright (C) 1997, 1998, 1999, 2000, 2002, 2003, 2004, 2005,
   2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.

Java and all Java-based marks are trademarks or registered trademarks
of Sun Microsystems, Inc. in the United States and other countries.
The Free Software Foundation is independent of Sun Microsystems, Inc. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "java-tree.h"
#include "javaop.h"
#include "java-opcodes.h"
#include "jcf.h"
#include "function.h"
#include "except.h"
#include "java-except.h"
#include "toplev.h"

static void expand_start_java_handler (struct eh_range *);
static struct eh_range *find_handler_in_range (int, struct eh_range *,
                                               struct eh_range *);
static void check_start_handlers (struct eh_range *, int);
static void free_eh_ranges (struct eh_range *range);

struct eh_range *current_method_handlers;

struct eh_range *current_try_block = NULL;

/* These variables are used to speed up find_handler. */

static int cache_range_start, cache_range_end;
static struct eh_range *cache_range;
static struct eh_range *cache_next_child;

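/* A note on the cache (informal, derived from the code below): after a
   lookup, any PC in the half-open interval
   [cache_range_start, cache_range_end) is known to map to CACHE_RANGE,
   and CACHE_NEXT_CHILD is the first child of CACHE_RANGE that starts at
   or after cache_range_end.  find_handler relies on this to answer
   repeated queries for increasing PCs without rescanning the whole
   range tree. */
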
/* A dummy range that represents the entire method. */

struct eh_range whole_range;

/* Check the invariants of the structure we're using to contain
   exception regions.  Either returns true or fails an assertion
   check. */

bool
sanity_check_exception_range (struct eh_range *range)
{
  struct eh_range *ptr = range->first_child;
  for (; ptr; ptr = ptr->next_sibling)
    {
      gcc_assert (ptr->outer == range
                  && ptr->end_pc > ptr->start_pc);
      if (ptr->next_sibling)
        gcc_assert (ptr->next_sibling->start_pc >= ptr->end_pc);
      gcc_assert (ptr->start_pc >= ptr->outer->start_pc
                  && ptr->end_pc <= ptr->outer->end_pc);
      (void) sanity_check_exception_range (ptr);
    }
  return true;
}
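
/* Illustration (not part of the original sources): a method whose
   bytecode has an outer try covering PCs [0, 20) with a handler for B,
   and an inner try covering PCs [4, 10) with a handler for A, is
   represented as

     whole_range [0, code_length + 1)
       +- range [0, 20)   handlers: B
            +- range [4, 10)   handlers: A

   Children nest strictly inside their outer range, and siblings are
   disjoint and ordered by start_pc; that is exactly what
   sanity_check_exception_range verifies. */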

#if defined(DEBUG_JAVA_BINDING_LEVELS)
extern int is_class_level;
extern int current_pc;
extern int binding_depth;
extern void indent (void);
static void
print_ranges (struct eh_range *range)
{
  if (! range)
    return;

  struct eh_range *child = range->first_child;

  indent ();
  fprintf (stderr, "handler pc %d --> %d ", range->start_pc, range->end_pc);

  tree handler = range->handlers;
  for ( ; handler != NULL_TREE; handler = TREE_CHAIN (handler))
    {
      tree type = TREE_PURPOSE (handler);
      if (type == NULL)
        type = throwable_type_node;
      fprintf (stderr, " type=%s ", IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))));
    }
  fprintf (stderr, "\n");

  int saved = binding_depth;
  binding_depth++;
  print_ranges (child);
  binding_depth = saved;

  print_ranges (range->next_sibling);
}
#endif

/* Search for the most specific eh_range containing PC.
   Assume PC is within RANGE.
   CHILD is a list of children of RANGE such that any
   previous children have end_pc values that are too low. */

static struct eh_range *
find_handler_in_range (int pc, struct eh_range *range, struct eh_range *child)
{
  for (; child != NULL; child = child->next_sibling)
    {
      if (pc < child->start_pc)
        break;
      if (pc < child->end_pc)
        return find_handler_in_range (pc, child, child->first_child);
    }
  cache_range = range;
  cache_range_start = pc;
  cache_next_child = child;
  cache_range_end = child == NULL ? range->end_pc : child->start_pc;
  return range;
}

/* Find the inner-most handler that contains PC. */

struct eh_range *
find_handler (int pc)
{
  struct eh_range *h;
  if (pc >= cache_range_start)
    {
      h = cache_range;
      if (pc < cache_range_end)
        return h;
      while (pc >= h->end_pc)
        {
          cache_next_child = h->next_sibling;
          h = h->outer;
        }
    }
  else
    {
      h = &whole_range;
      cache_next_child = h->first_child;
    }
  return find_handler_in_range (pc, h, cache_next_child);
}

static void
free_eh_ranges (struct eh_range *range)
{
  while (range)
    {
      struct eh_range *next = range->next_sibling;
      free_eh_ranges (range->first_child);
      if (range != &whole_range)
        free (range);
      range = next;
    }
}

/* Called to re-initialize the exception machinery for a new method. */

void
method_init_exceptions (void)
{
  free_eh_ranges (&whole_range);
  whole_range.start_pc = 0;
  whole_range.end_pc = DECL_CODE_LENGTH (current_function_decl) + 1;
  whole_range.outer = NULL;
  whole_range.first_child = NULL;
  whole_range.next_sibling = NULL;
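  /* Added note (not in the original): 0xFFFFFF is larger than any valid
     bytecode PC, so the first find_handler call in a new method is
     guaranteed to miss the cache and rebuild it starting from
     whole_range. */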
  cache_range_start = 0xFFFFFF;
}

/* Split an exception range into two at PC.  The sub-ranges that
   belong to the range are split and distributed between the two new
   ranges. */
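
/* Illustrative example (not part of the original comment): splitting a
   range covering [0, 20) at PC 8 leaves the original range covering
   [0, 8) and creates a sibling covering [8, 20) with a fresh TREE_LIST
   node referring to the same handler.  A child covering [2, 6) stays
   with the first half; a child covering [10, 14) is moved under the
   new second half and has its outer pointer updated; a child straddling
   PC 8 is itself split first by the recursive pass. */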

static void
split_range (struct eh_range *range, int pc)
{
  struct eh_range *ptr;
  struct eh_range **first_child, **second_child;
  struct eh_range *h;

  /* First, split all the sub-ranges. */
  for (ptr = range->first_child; ptr; ptr = ptr->next_sibling)
    {
      if (pc > ptr->start_pc
          && pc < ptr->end_pc)
        {
          split_range (ptr, pc);
        }
    }

  /* Create a new range. */
  h = XNEW (struct eh_range);

  h->start_pc = pc;
  h->end_pc = range->end_pc;
  h->next_sibling = range->next_sibling;
  range->next_sibling = h;
  range->end_pc = pc;
  h->handlers = build_tree_list (TREE_PURPOSE (range->handlers),
                                 TREE_VALUE (range->handlers));
  h->next_sibling = NULL;
  h->expanded = 0;
  h->stmt = NULL;
  h->outer = range->outer;
  h->first_child = NULL;

  ptr = range->first_child;
  first_child = &range->first_child;
  second_child = &h->first_child;

  /* Distribute the sub-ranges between the two new ranges. */
  for (ptr = range->first_child; ptr; ptr = ptr->next_sibling)
    {
      if (ptr->start_pc < pc)
        {
          *first_child = ptr;
          ptr->outer = range;
          first_child = &ptr->next_sibling;
        }
      else
        {
          *second_child = ptr;
          ptr->outer = h;
          second_child = &ptr->next_sibling;
        }
    }
  *first_child = NULL;
  *second_child = NULL;
}


/* Add an exception range.

   There are some missed optimization opportunities here.  For
   example, some bytecode obfuscators generate seemingly
   nonoverlapping exception ranges which, when coalesced, do in fact
   nest correctly.  We could merge these, but we'd have to fix up all
   the enclosed regions first and perhaps create a new range anyway if
   it overlapped existing ranges.

   Also, we don't attempt to detect the case where two previously
   added disjoint ranges could be coalesced by a new range. */
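
/* Worked example (added for illustration, not from the original
   sources): suppose the top level already holds a single range [0, 20)
   with handler A, and add_handler (4, 10, B, ...) is called.  The
   existing range is split at 4 and at 10, giving [0, 4), [4, 10) and
   [10, 20), all with handler A.  The new range [4, 10) for B then
   adopts the middle piece as its child, so the final top level is

     [0, 4) A    [4, 10) B { child: [4, 10) A }    [10, 20) A

   and within [4, 10) the earlier-added handler A remains innermost,
   matching the search order of the bytecode exception table. */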

void
add_handler (int start_pc, int end_pc, tree handler, tree type)
{
  struct eh_range *ptr, *h;
  struct eh_range **first_child, **prev;

  /* First, split all the existing ranges that we need to enclose. */
  for (ptr = whole_range.first_child; ptr; ptr = ptr->next_sibling)
    {
      if (start_pc > ptr->start_pc
          && start_pc < ptr->end_pc)
        {
          split_range (ptr, start_pc);
        }

      if (end_pc > ptr->start_pc
          && end_pc < ptr->end_pc)
        {
          split_range (ptr, end_pc);
        }

      if (ptr->start_pc >= end_pc)
        break;
    }

  /* Create the new range. */
  h = XNEW (struct eh_range);
  first_child = &h->first_child;

  h->start_pc = start_pc;
  h->end_pc = end_pc;
  h->first_child = NULL;
  h->outer = NULL_EH_RANGE;
  h->handlers = build_tree_list (type, handler);
  h->next_sibling = NULL;
  h->expanded = 0;
  h->stmt = NULL;

  /* Find every range at the top level that will be a sub-range of the
     range we're inserting and make it so. */
  {
    struct eh_range **prev = &whole_range.first_child;
    for (ptr = *prev; ptr;)
      {
        struct eh_range *next = ptr->next_sibling;

        if (ptr->start_pc >= end_pc)
          break;

        if (ptr->start_pc < start_pc)
          {
            prev = &ptr->next_sibling;
          }
        else if (ptr->start_pc >= start_pc
                 && ptr->start_pc < end_pc)
          {
            *prev = next;
            *first_child = ptr;
            first_child = &ptr->next_sibling;
            ptr->outer = h;
            ptr->next_sibling = NULL;
          }

        ptr = next;
      }
  }

  /* Find the right place to insert the new range. */
  prev = &whole_range.first_child;
  for (ptr = *prev; ptr; prev = &ptr->next_sibling, ptr = ptr->next_sibling)
    {
      gcc_assert (ptr->outer == NULL_EH_RANGE);
      if (ptr->start_pc >= start_pc)
        break;
    }

  /* And insert it there. */
  *prev = h;
  if (ptr)
    {
      h->next_sibling = ptr;
      h->outer = ptr->outer;
    }
}


/* If there are any handlers for this range, issue the start of the region. */
static void
expand_start_java_handler (struct eh_range *range)
{
#if defined(DEBUG_JAVA_BINDING_LEVELS)
  indent ();
  fprintf (stderr, "expand start handler pc %d --> %d\n",
           current_pc, range->end_pc);
#endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
  pushlevel (0);
  register_exception_range (range, range->start_pc, range->end_pc);
  range->expanded = 1;
}

tree
prepare_eh_table_type (tree type)
{
  tree exp;
  tree *slot;
  const char *name;
  char *buf;
  tree decl;
  tree utf8_ref;

  /* The "type" (match_info) in a (Java) exception table is a pointer to:
   * a) NULL - meaning match any type in a try-finally.
   * b) a pointer to a pointer to a class.
   * c) a pointer to a pointer to a utf8_ref.  The pointer is
   * rewritten to point to the appropriate class. */
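
  /* Added note (interpretation, not from the original comment): case c)
     is taken below when the class is not compiled in this unit or when
     -findirect-dispatch is in effect; the utf8_ref names the class, and
     the runtime later replaces the reference with the real class
     pointer, using the records collected in TYPE_CATCH_CLASSES. */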

  if (type == NULL_TREE)
    return NULL_TREE;

  if (TYPE_TO_RUNTIME_MAP (output_class) == NULL)
    TYPE_TO_RUNTIME_MAP (output_class) = java_treetreehash_create (10, 1);

  slot = java_treetreehash_new (TYPE_TO_RUNTIME_MAP (output_class), type);
  if (*slot != NULL)
    return TREE_VALUE (*slot);

  if (is_compiled_class (type) && !flag_indirect_dispatch)
    {
      name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
      buf = (char *) alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (input_location,
                         VAR_DECL, get_identifier (buf), ptr_type_node);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      DECL_INITIAL (decl) = build_class_ref (type);
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl);
    }
  else
    {
      utf8_ref = build_utf8_ref (DECL_NAME (TYPE_NAME (type)));
      name = IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (utf8_ref, 0)));
      buf = (char *) alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (input_location,
                         VAR_DECL, get_identifier (buf), utf8const_ptr_type);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (utf8const_ptr_type), decl);
      TYPE_CATCH_CLASSES (output_class) =
        tree_cons (NULL, make_catch_class_record (exp, utf8_ref),
                   TYPE_CATCH_CLASSES (output_class));
    }

  exp = convert (ptr_type_node, exp);

  *slot = tree_cons (type, exp, NULL_TREE);

  return exp;
}

static int
expand_catch_class (void **entry, void *x ATTRIBUTE_UNUSED)
{
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
  tree addr = TREE_VALUE ((tree)ite->value);
  tree decl;
  STRIP_NOPS (addr);
  decl = TREE_OPERAND (addr, 0);
  rest_of_decl_compilation (decl, global_bindings_p (), 0);
  return true;
}

/* For every class in the TYPE_TO_RUNTIME_MAP, expand the
   corresponding object that is used by the runtime type matcher. */

void
java_expand_catch_classes (tree this_class)
{
  if (TYPE_TO_RUNTIME_MAP (this_class))
    htab_traverse
      (TYPE_TO_RUNTIME_MAP (this_class),
       expand_catch_class, NULL);
}

/* Build a reference to the jthrowable object being carried in the
   exception header. */

tree
build_exception_object_ref (tree type)
{
  tree obj;

  /* Java only passes object via pointer and doesn't require adjusting.
     The java object is immediately before the generic exception header. */
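  /* Added sketch (an assumption about the runtime-side layout, not
     stated in this file): the in-flight exception looks roughly like

         [ Java exception object ][ generic unwinder header ]
                                  ^ EXC_PTR_EXPR points here

     so stepping back by the size of the pointed-to type and taking an
     INDIRECT_REF recovers the Java object, as the comment above says. */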
  obj = build0 (EXC_PTR_EXPR, build_pointer_type (type));
  obj = build2 (POINTER_PLUS_EXPR, TREE_TYPE (obj), obj,
                fold_build1 (NEGATE_EXPR, sizetype,
                             TYPE_SIZE_UNIT (TREE_TYPE (obj))));
  obj = build1 (INDIRECT_REF, type, obj);

  return obj;
}

/* If there are any handlers for this range, issue the end of the range
   and then all the handler blocks. */
void
expand_end_java_handler (struct eh_range *range)
{
  tree handler = range->handlers;

  for ( ; handler != NULL_TREE; handler = TREE_CHAIN (handler))
    {
      /* For bytecode we treat exceptions a little unusually.  A
         `finally' clause looks like an ordinary exception handler for
         Throwable.  The reason for this is that the bytecode has
         already expanded the finally logic, and we would have to do
         extra (and difficult) work to get this to look like a
         gcc-style finally clause. */
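      /* Added note (informal restatement of the code below): for each
         handler, the statement list S accumulated so far is rewritten as

             TRY_CATCH_EXPR (S, CATCH_EXPR (TYPE, GOTO_EXPR handler_label))

         so control transfers to the handler's label when an exception
         of a matching type escapes S. */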
      tree type = TREE_PURPOSE (handler);
      if (type == NULL)
        type = throwable_type_node;
      type = prepare_eh_table_type (type);

      {
        tree catch_expr = build2 (CATCH_EXPR, void_type_node, type,
                                  build1 (GOTO_EXPR, void_type_node,
                                          TREE_VALUE (handler)));
        tree try_catch_expr = build2 (TRY_CATCH_EXPR, void_type_node,
                                      *get_stmts (), catch_expr);
        *get_stmts () = try_catch_expr;
      }
    }
#if defined(DEBUG_JAVA_BINDING_LEVELS)
  indent ();
  fprintf (stderr, "expand end handler pc %d <-- %d\n",
           current_pc, range->start_pc);
#endif /* defined(DEBUG_JAVA_BINDING_LEVELS) */
}

/* Recursive helper routine for maybe_start_try. */

static void
check_start_handlers (struct eh_range *range, int pc)
{
  if (range != NULL_EH_RANGE && range->start_pc == pc)
    {
      check_start_handlers (range->outer, pc);
      if (!range->expanded)
        expand_start_java_handler (range);
    }
}


static struct eh_range *current_range;

/* Emit any start-of-try-range starting at start_pc and ending after
   end_pc. */

void
maybe_start_try (int start_pc, int end_pc)
{
  struct eh_range *range;
  if (! doing_eh (1))
    return;

  range = find_handler (start_pc);
  while (range != NULL_EH_RANGE && range->start_pc == start_pc
         && range->end_pc < end_pc)
    range = range->outer;

  current_range = range;
  check_start_handlers (range, start_pc);
}