In gcc/objc/: 2010-11-05 Nicola Pero <nicola.pero@meta-innovation.com>
[gcc.git] / gcc / java / expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2010 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
20
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "java-tree.h"
33 #include "javaop.h"
34 #include "java-opcodes.h"
35 #include "jcf.h"
36 #include "java-except.h"
37 #include "parse.h"
38 #include "diagnostic-core.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tree-iterator.h"
42 #include "target.h"
43
44 static void flush_quick_stack (void);
45 static void push_value (tree);
46 static tree pop_value (tree);
47 static void java_stack_swap (void);
48 static void java_stack_dup (int, int);
49 static void build_java_athrow (tree);
50 static void build_java_jsr (int, int);
51 static void build_java_ret (tree);
52 static void expand_java_multianewarray (tree, int);
53 static void expand_java_arraystore (tree);
54 static void expand_java_arrayload (tree);
55 static void expand_java_array_length (void);
56 static tree build_java_monitor (tree, tree);
57 static void expand_java_pushc (int, tree);
58 static void expand_java_return (tree);
59 static void expand_load_internal (int, tree, int);
60 static void expand_java_NEW (tree);
61 static void expand_java_INSTANCEOF (tree);
62 static void expand_java_CHECKCAST (tree);
63 static void expand_iinc (unsigned int, int, int);
64 static void expand_java_binop (tree, enum tree_code);
65 static void note_label (int, int);
66 static void expand_compare (enum tree_code, tree, tree, int);
67 static void expand_test (enum tree_code, tree, int);
68 static void expand_cond (enum tree_code, tree, int);
69 static void expand_java_goto (int);
70 static tree expand_java_switch (tree, int);
71 static void expand_java_add_case (tree, int, int);
72 static VEC(tree,gc) *pop_arguments (tree);
73 static void expand_invoke (int, int, int);
74 static void expand_java_field_op (int, int, int);
75 static void java_push_constant_from_pool (struct JCF *, int);
76 static void java_stack_pop (int);
77 static tree build_java_throw_out_of_bounds_exception (tree);
78 static tree build_java_check_indexed_type (tree, tree);
79 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
80 static void promote_arguments (void);
81 static void cache_cpool_data_ref (void);
82
83 static GTY(()) tree operand_type[59];
84
85 static GTY(()) tree methods_ident;
86 static GTY(()) tree ncode_ident;
87 tree dtable_ident = NULL_TREE;
88
89 /* Set to nonzero value in order to emit class initialization code
90 before static field references. */
91 int always_initialize_class_p = 0;
92
93 /* We store the stack state in two places:
94 Within a basic block, we use the quick_stack, which is a VEC of expression
95 nodes.
96 This is the top part of the stack; below that we use find_stack_slot.
97 At the end of a basic block, the quick_stack must be flushed
98 to the stack slot array (as handled by find_stack_slot).
99 Using quick_stack generates better code (especially when
100 compiled without optimization), because we do not have to
101 explicitly store and load trees to temporary variables.
102
103 If a variable is on the quick stack, it means the value of variable
104 when the quick stack was last flushed. Conceptually, flush_quick_stack
105 saves all the quick_stack elements in parallel. However, that is
106 complicated, so it actually saves them (i.e. copies each stack value
107 to is home virtual register) from low indexes. This allows a quick_stack
108 element at index i (counting from the bottom of stack the) to references
109 slot virtuals for register that are >= i, but not those that are deeper.
110 This convention makes most operations easier. For example iadd works
111 even when the stack contains (reg[0], reg[1]): It results in the
112 stack containing (reg[0]+reg[1]), which is OK. However, some stack
113 operations are more complicated. For example dup given a stack
114 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
115 the convention, since stack value 1 would refer to a register with
116 lower index (reg[0]), which flush_quick_stack does not safely handle.
117 So dup cannot just add an extra element to the quick_stack, but iadd can.
118 */
119
120 static GTY(()) VEC(tree,gc) *quick_stack;
121
122 /* The physical memory page size used in this computer. See
123 build_field_ref(). */
124 static GTY(()) tree page_size;
125
126 /* The stack pointer of the Java virtual machine.
127 This does include the size of the quick_stack. */
128
129 int stack_pointer;
130
131 const unsigned char *linenumber_table;
132 int linenumber_count;
133
134 /* Largest pc so far in this method that has been passed to lookup_label. */
135 int highest_label_pc_this_method = -1;
136
137 /* Base value for this method to add to pc to get generated label. */
138 int start_label_pc_this_method = 0;
139
140 void
141 init_expr_processing (void)
142 {
143 operand_type[21] = operand_type[54] = int_type_node;
144 operand_type[22] = operand_type[55] = long_type_node;
145 operand_type[23] = operand_type[56] = float_type_node;
146 operand_type[24] = operand_type[57] = double_type_node;
147 operand_type[25] = operand_type[58] = ptr_type_node;
148 }
149
/* Convert EXPR to an expression usable as a truth value, returning a
   boolean-typed tree.  Expressions that already have boolean form are
   returned unchanged; everything else is compared against false.  */

tree
java_truthvalue_conversion (tree expr)
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    /* Comparisons and logical operators are already truth values;
       ERROR_MARK is passed through so one error is not compounded.  */
    case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
    case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
    case ERROR_MARK:
      return expr;

    /* Constants fold directly to a boolean constant.  */
    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 2)));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
	  >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      /* Anything else: nonzero/non-null means true.  */
      return fold_build2 (NE_EXPR, boolean_type_node,
			  expr, boolean_false_node);
    }
}
204
205 /* Save any stack slots that happen to be in the quick_stack into their
206 home virtual register slots.
207
208 The copy order is from low stack index to high, to support the invariant
209 that the expression for a slot may contain decls for stack slots with
210 higher (or the same) index, but not lower. */
211
static void
flush_quick_stack (void)
{
  int stack_index = stack_pointer;
  unsigned ix;
  tree t;

  /* Count the number of slots the quick stack is holding; after this
     loop STACK_INDEX is the slot index of the bottom quick-stack
     element (wide values occupy two slots).  */
  for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
    stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));

  /* Copy each value into its home stack slot, from low index to high
     (see the invariant documented above the function).  Skip the store
     when the value already IS its home slot decl.  */
  for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
    {
      tree decl, type = TREE_TYPE (t);

      decl = find_stack_slot (stack_index, type);
      if (decl != t)
	java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
      stack_index += 1 + TYPE_IS_WIDE (type);
    }

  /* Everything now lives in its home slot.  */
  VEC_truncate (tree, quick_stack, 0);
}
235
236 /* Push TYPE on the type stack.
237 Return true on success, 0 on overflow. */
238
239 int
240 push_type_0 (tree type)
241 {
242 int n_words;
243 type = promote_type (type);
244 n_words = 1 + TYPE_IS_WIDE (type);
245 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
246 return 0;
247 /* Allocate decl for this variable now, so we get a temporary that
248 survives the whole method. */
249 find_stack_slot (stack_pointer, type);
250 stack_type_map[stack_pointer++] = type;
251 n_words--;
252 while (--n_words >= 0)
253 stack_type_map[stack_pointer++] = TYPE_SECOND;
254 return 1;
255 }
256
257 void
258 push_type (tree type)
259 {
260 int r = push_type_0 (type);
261 gcc_assert (r);
262 }
263
/* Push VALUE onto the quick stack and record its (promoted) type on
   the type stack.  Sub-int integral values are widened to int first,
   matching the operand-stack model used throughout this file.  */

static void
push_value (tree value)
{
  tree type = TREE_TYPE (value);
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
    {
      type = promote_type (type);
      value = convert (type, value);
    }
  push_type (type);
  VEC_safe_push (tree, gc, quick_stack, value);

  /* If the value has a side effect, then we need to evaluate it
     whether or not the result is used.  If the value ends up on the
     quick stack and is then popped, this won't happen -- so we flush
     the quick stack.  It is safest to simply always flush, though,
     since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
     the latter we may need to strip conversions.  */
  flush_quick_stack ();
}
284
285 /* Pop a type from the type stack.
286 TYPE is the expected type. Return the actual type, which must be
287 convertible to TYPE.
288 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
289
tree
pop_type_0 (tree type, char **messagep)
{
  int n_words;
  tree t;

  *messagep = NULL;
  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  /* A wide value is stored as (type, void_type_node); verify and pop
     the filler word(s) first.  */
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
	{
	  *messagep = xstrdup ("Invalid multi-word value on type stack");
	  return type;
	}
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;

  /* A TREE_LIST on the type stack holds a set of candidate reference
     types; every candidate must be widenable to TYPE.  */
  if (TREE_CODE (t) == TREE_LIST)
    {
      do
	{
	  tree tt = TREE_PURPOSE (t);
	  if (! can_widen_reference_to (tt, type))
	    {
	      t = tt;
	      goto fail;
	    }
	  t = TREE_CHAIN (t);
	}
      while (t);
      return t;
    }

  /* int-sized and narrower integral types are interchangeable on the
     operand stack.  */
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
    return t;

  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      /* If the expected type we've been passed is object or ptr
	 (i.e. void*), the caller needs to know the real type.  */
      if (type == ptr_type_node || type == object_ptr_type_node)
	return t;

      /* Since the verifier has already run, we know that any
	 types we see will be compatible.  In BC mode, this fact
	 may be checked at runtime, but if that is so then we can
	 assume its truth here as well.  So, we always succeed
	 here, with the expected type.  */
      return type;
    }

  if (! flag_verify_invocations && flag_indirect_dispatch
      && t == object_ptr_type_node)
    {
      if (type != ptr_type_node)
	warning (0, "need to insert runtime check for %s",
		 xstrdup (lang_printable_name (type, 0)));
      return type;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
 fail:
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    /* If the stack contains a multi-word type, keep popping the stack until
       the real type is found.  */
    while (t == void_type_node)
      t = stack_type_map[--stack_pointer];
    *messagep = concat ("expected type '", temp,
			"' but stack contains '", lang_printable_name (t, 0),
			"'", NULL);
    free (temp);
  }
  return type;
}
373
374 /* Pop a type from the type stack.
375 TYPE is the expected type. Return the actual type, which must be
376 convertible to TYPE, otherwise call error. */
377
378 tree
379 pop_type (tree type)
380 {
381 char *message = NULL;
382 type = pop_type_0 (type, &message);
383 if (message != NULL)
384 {
385 error ("%s", message);
386 free (message);
387 }
388 return type;
389 }
390
391 \f
392 /* Return true if two type assertions are equal. */
393
394 static int
395 type_assertion_eq (const void * k1_p, const void * k2_p)
396 {
397 const type_assertion k1 = *(const type_assertion *)k1_p;
398 const type_assertion k2 = *(const type_assertion *)k2_p;
399 return (k1.assertion_code == k2.assertion_code
400 && k1.op1 == k2.op1
401 && k1.op2 == k2.op2);
402 }
403
404 /* Hash a type assertion. */
405
/* Hash a type assertion.  */

static hashval_t
type_assertion_hash (const void *p)
{
  const type_assertion *k_p = (const type_assertion *) p;
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
				   k_p->assertion_code, 0);

  /* Mix in only the operands this assertion kind uses; the cases
     deliberately fall through from the two-operand kind to the
     one-operand kind to the zero-operand kind.  */
  switch (k_p->assertion_code)
    {
    case JV_ASSERT_TYPES_COMPATIBLE:
      hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
			     hash);
      /* Fall through.  */

    case JV_ASSERT_IS_INSTANTIABLE:
      hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
			     hash);
      /* Fall through.  */

    case JV_ASSERT_END_OF_TABLE:
      break;

    default:
      gcc_unreachable ();
    }

  return hash;
}
434
435 /* Add an entry to the type assertion table for the given class.
436 KLASS is the class for which this assertion will be evaluated by the
437 runtime during loading/initialization.
438 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
439 OP1 and OP2 are the operands. The tree type of these arguments may be
440 specific to each assertion_code. */
441
442 void
443 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
444 {
445 htab_t assertions_htab;
446 type_assertion as;
447 void **as_pp;
448
449 assertions_htab = TYPE_ASSERTIONS (klass);
450 if (assertions_htab == NULL)
451 {
452 assertions_htab = htab_create_ggc (7, type_assertion_hash,
453 type_assertion_eq, NULL);
454 TYPE_ASSERTIONS (current_class) = assertions_htab;
455 }
456
457 as.assertion_code = assertion_code;
458 as.op1 = op1;
459 as.op2 = op2;
460
461 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
462
463 /* Don't add the same assertion twice. */
464 if (*as_pp)
465 return;
466
467 *as_pp = ggc_alloc_type_assertion ();
468 **(type_assertion **)as_pp = as;
469 }
470
471 \f
472 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
473 Handles array types and interfaces. */
474
int
can_widen_reference_to (tree source_type, tree target_type)
{
  /* Anything widens from void* or to java.lang.Object.  */
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;

  /* FIXME: This is very pessimistic, in that it checks everything,
     even if we already know that the types are compatible.  If we're
     to support full Java class loader semantics, we need this.
     However, we could do something more optimal.  */
  if (! flag_verify_invocations)
    {
      /* Record a runtime assertion and optimistically succeed; the
	 runtime will perform the real check at class load time.  */
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
			  source_type, target_type);

      if (!quiet_flag)
       warning (0, "assert: %s is assign compatible with %s",
		xstrdup (lang_printable_name (target_type, 0)),
		xstrdup (lang_printable_name (source_type, 0)));
      /* Punt everything to runtime.  */
      return 1;
    }

  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
    {
      return 1;
    }
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
	{
	  HOST_WIDE_INT source_length, target_length;
	  if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
	    {
	      /* An array implements Cloneable and Serializable.  */
	      tree name = DECL_NAME (TYPE_NAME (target_type));
	      return (name == java_lang_cloneable_identifier_node
		      || name == java_io_serializable_identifier_node);
	    }

	  /* Both are arrays: any known lengths must agree ...  */
	  target_length = java_array_type_length (target_type);
	  if (target_length >= 0)
	    {
	      source_length = java_array_type_length (source_type);
	      if (source_length != target_length)
		return 0;
	    }
	  /* ... and reference element types must themselves widen.  */
	  source_type = TYPE_ARRAY_ELEMENT (source_type);
	  target_type = TYPE_ARRAY_ELEMENT (target_type);
	  if (source_type == target_type)
	    return 1;
	  if (TREE_CODE (source_type) != POINTER_TYPE
	      || TREE_CODE (target_type) != POINTER_TYPE)
	    return 0;
	  return can_widen_reference_to (source_type, target_type);
	}
      else
	{
	  int source_depth = class_depth (source_type);
	  int target_depth = class_depth (target_type);

	  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
	    {
	      if (! quiet_flag)
		warning (0, "assert: %s is assign compatible with %s",
			 xstrdup (lang_printable_name (target_type, 0)),
			 xstrdup (lang_printable_name (source_type, 0)));
	      return 1;
	    }

	  /* class_depth can return a negative depth if an error occurred */
	  if (source_depth < 0 || target_depth < 0)
	    return 0;

	  if (CLASS_INTERFACE (TYPE_NAME (target_type)))
	    {
	      /* target_type is OK if source_type or source_type ancestors
		 implement target_type.  We handle multiple sub-interfaces  */
	      tree binfo, base_binfo;
	      int i;

	      for (binfo = TYPE_BINFO (source_type), i = 0;
		   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
		if (can_widen_reference_to
		    (BINFO_TYPE (base_binfo), target_type))
		  return 1;

	      /* No base types at all: the interface cannot be reached.  */
	      if (!i)
		return 0;
	    }

	  /* Walk up SOURCE_TYPE's superclass chain until both classes
	     sit at the same depth, then compare directly.  */
	  for ( ; source_depth > target_depth;  source_depth--)
	    {
	      source_type
		= BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
	    }
	  return source_type == target_type;
	}
    }
}
583
584 static tree
585 pop_value (tree type)
586 {
587 type = pop_type (type);
588 if (VEC_length (tree, quick_stack) != 0)
589 return VEC_pop (tree, quick_stack);
590 else
591 return find_stack_slot (stack_pointer, promote_type (type));
592 }
593
594
595 /* Pop and discard the top COUNT stack slots. */
596
597 static void
598 java_stack_pop (int count)
599 {
600 while (count > 0)
601 {
602 tree type;
603
604 gcc_assert (stack_pointer != 0);
605
606 type = stack_type_map[stack_pointer - 1];
607 if (type == TYPE_SECOND)
608 {
609 count--;
610 gcc_assert (stack_pointer != 1 && count > 0);
611
612 type = stack_type_map[stack_pointer - 2];
613 }
614 pop_value (type);
615 count--;
616 }
617 }
618
619 /* Implement the 'swap' operator (to swap two top stack slots). */
620
static void
java_stack_swap (void)
{
  tree type1, type2;
  tree temp;
  tree decl1, decl2;

  /* Both of the top two slots must hold single-word values; note that
     the condition loads TYPE1 and TYPE2 as a side effect.  */
  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  /* Force both values into their home slots, then exchange the slots
     through a fresh temporary.  */
  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
  java_add_stmt (build2 (MODIFY_EXPR, type2,
			 find_stack_slot (stack_pointer - 1, type2),
			 decl2));
  java_add_stmt (build2 (MODIFY_EXPR, type1,
			 find_stack_slot (stack_pointer - 2, type1),
			 temp));
  /* The slot types swap along with the values.  */
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}
651
/* Implement the dup* family of operations: copy the top SIZE words and
   insert the copy OFFSET words further down, growing the stack by SIZE
   words.  */

static void
java_stack_dup (int size, int offset)
{
  int low_index = stack_pointer - size - offset;
  int dst_index;
  if (low_index < 0)
    error ("stack underflow - dup* operation");

  flush_quick_stack ();

  stack_pointer += size;
  dst_index = stack_pointer;

  /* Fill destination slots from the top down.  Each destination takes
     its value from SIZE words below it; once that source would dip
     below LOW_INDEX, the source instead wraps to the already-written
     copy at DST_INDEX + SIZE + OFFSET.  */
  for (dst_index = stack_pointer;  --dst_index >= low_index; )
    {
      tree type;
      int src_index = dst_index - size;
      if (src_index < low_index)
	src_index = dst_index + size + offset;
      type = stack_type_map [src_index];
      if (type == TYPE_SECOND)
	{
	  /* Dup operation splits 64-bit number.  */
	  gcc_assert (src_index > low_index);

	  stack_type_map[dst_index] = type;
	  src_index--;  dst_index--;
	  type = stack_type_map[src_index];
	  gcc_assert (TYPE_IS_WIDE (type));
	}
      else
	gcc_assert (! TYPE_IS_WIDE (type));

      if (src_index != dst_index)
	{
	  tree src_decl = find_stack_slot (src_index, type);
	  tree dst_decl = find_stack_slot (dst_index, type);

	  java_add_stmt
	    (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
	  stack_type_map[dst_index] = type;
	}
    }
}
696
697 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
698 value stack. */
699
700 static void
701 build_java_athrow (tree node)
702 {
703 tree call;
704
705 call = build_call_nary (void_type_node,
706 build_address_of (throw_node),
707 1, node);
708 TREE_SIDE_EFFECTS (call) = 1;
709 java_add_stmt (call);
710 java_stack_pop (stack_pointer);
711 }
712
713 /* Implementation for jsr/ret */
714
715 static void
716 build_java_jsr (int target_pc, int return_pc)
717 {
718 tree where = lookup_label (target_pc);
719 tree ret = lookup_label (return_pc);
720 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
721 push_value (ret_label);
722 flush_quick_stack ();
723 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
724
725 /* Do not need to emit the label here. We noted the existence of the
726 label as a jump target in note_instructions; we'll emit the label
727 for real at the beginning of the expand_byte_code loop. */
728 }
729
730 static void
731 build_java_ret (tree location)
732 {
733 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
734 }
735
736 /* Implementation of operations on array: new, load, store, length */
737
738 tree
739 decode_newarray_type (int atype)
740 {
741 switch (atype)
742 {
743 case 4: return boolean_type_node;
744 case 5: return char_type_node;
745 case 6: return float_type_node;
746 case 7: return double_type_node;
747 case 8: return byte_type_node;
748 case 9: return short_type_node;
749 case 10: return int_type_node;
750 case 11: return long_type_node;
751 default: return NULL_TREE;
752 }
753 }
754
755 /* Map primitive type to the code used by OPCODE_newarray. */
756
757 int
758 encode_newarray_type (tree type)
759 {
760 if (type == boolean_type_node)
761 return 4;
762 else if (type == char_type_node)
763 return 5;
764 else if (type == float_type_node)
765 return 6;
766 else if (type == double_type_node)
767 return 7;
768 else if (type == byte_type_node)
769 return 8;
770 else if (type == short_type_node)
771 return 9;
772 else if (type == int_type_node)
773 return 10;
774 else if (type == long_type_node)
775 return 11;
776 else
777 gcc_unreachable ();
778 }
779
780 /* Build a call to _Jv_ThrowBadArrayIndex(), the
781 ArrayIndexOfBoundsException exception handler. */
782
783 static tree
784 build_java_throw_out_of_bounds_exception (tree index)
785 {
786 tree node;
787
788 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
789 has void return type. We cannot just set the type of the CALL_EXPR below
790 to int_type_node because we would lose it during gimplification. */
791 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
792 node = build_call_nary (void_type_node,
793 build_address_of (soft_badarrayindex_node),
794 1, index);
795 TREE_SIDE_EFFECTS (node) = 1;
796
797 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
798 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
799
800 return (node);
801 }
802
803 /* Return the length of an array. Doesn't perform any checking on the nature
804 or value of the array NODE. May be used to implement some bytecodes. */
805
tree
build_java_array_length_access (tree node)
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  if (!is_array_type_p (type))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use an arbitrary array type.  */
      array_type = build_java_array_type (object_ptr_type_node, -1);
      type = promote_type (array_type);
    }

  /* If the array length is known at compile time, fold the access to
     a constant.  */
  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_cst (NULL_TREE, length);

  /* Otherwise load the 'length' field, with an optional null check.  */
  node = build3 (COMPONENT_REF, int_type_node,
		 build_java_indirect_ref (array_type, node,
					  flag_check_references),
		 lookup_field (&array_type, get_identifier ("length")),
		 NULL_TREE);
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}
833
834 /* Optionally checks a reference against the NULL pointer. ARG1: the
835 expr, ARG2: we should check the reference. Don't generate extra
836 checks if we're not generating code. */
837
838 tree
839 java_check_reference (tree expr, int check)
840 {
841 if (!flag_syntax_only && check)
842 {
843 expr = save_expr (expr);
844 expr = build3 (COND_EXPR, TREE_TYPE (expr),
845 build2 (EQ_EXPR, boolean_type_node,
846 expr, null_pointer_node),
847 build_call_nary (void_type_node,
848 build_address_of (soft_nullpointer_node),
849 0),
850 expr);
851 }
852
853 return expr;
854 }
855
856 /* Reference an object: just like an INDIRECT_REF, but with checking. */
857
858 tree
859 build_java_indirect_ref (tree type, tree expr, int check)
860 {
861 tree t;
862 t = java_check_reference (expr, check);
863 t = convert (build_pointer_type (type), t);
864 return build1 (INDIRECT_REF, type, t);
865 }
866
867 /* Implement array indexing (either as l-value or r-value).
868 Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
869 Optionally performs bounds checking and/or test to NULL.
870 At this point, ARRAY should have been verified as an array. */
871
tree
build_java_arrayaccess (tree array, tree type, tree index)
{
  tree node, throw_expr = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));

  if (!is_array_type_p (TREE_TYPE (array)))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use the correct array type.  */
      array_type = build_java_array_type (type, -1);
    }

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ...  */
      tree test;
      tree len = convert (unsigned_int_type_node,
			  build_java_array_length_access (array));
      test = fold_build2 (GE_EXPR, boolean_type_node,
			  convert (unsigned_int_type_node, index),
			  len);
      /* fold may have proved the index in range, in which case no
	 throw expression is needed at all.  */
      if (! integer_zerop (test))
	{
	  throw_expr
	    = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
		      build_java_throw_out_of_bounds_exception (index));
	  /* allows expansion within COMPOUND */
	  TREE_SIDE_EFFECTS( throw_expr ) = 1;
	}
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first.  */
  if (throw_expr != NULL_TREE)
    index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);

  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
		build_java_indirect_ref (array_type, array,
					 flag_check_references),
		data_field, NULL_TREE);

  /* Take the address of the data field and convert it to a pointer to
     the element type.  */
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));

  /* Multiply the index by the size of an element to obtain a byte
     offset.  Convert the result to a pointer to the element type.  */
  index = build2 (MULT_EXPR, sizetype,
		  fold_convert (sizetype, index),
		  size_exp);

  /* Sum the byte offset and the address of the data field.  */
  node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);

  /* Finally, return

    *((&array->data) + index*size_exp)

  */
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
}
943
944 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
945 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
946 determine that no check is required. */
947
tree
build_java_arraystore_check (tree array, tree object)
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! flag_verify_invocations)
    {
      /* With the new verifier, we don't track precise types.  FIXME:
	 performance regression here.  */
      element_type = TYPE_NAME (object_type_node);
    }
  else
    {
      gcc_assert (is_array_type_p (array_type_p));

      /* Get the TYPE_DECL for ARRAY's element type.  */
      element_type
	= TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
    }

  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
	      && TREE_CODE (object_type) == TYPE_DECL);

  /* Store checks disabled: return a no-op wrapper around ARRAY.  */
  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final.  Also check that
     element_type matches object_type, since in the bytecode
     compilation case element_type may be the actual element type of
     the array rather than its declared type.  However, if we're doing
     indirect dispatch, we can't do the `final' optimization.  */
  if (element_type == object_type
      && ! flag_indirect_dispatch
      && CLASS_FINAL (element_type))
    return build1 (NOP_EXPR, array_type_p, array);

  /* OBJECT might be wrapped by a SAVE_EXPR.  */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;

  /* Avoid the check if OBJECT was just loaded from the same array;
     strip the access expression down to the array decl and compare
     it with ARRAY.  */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF.  */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF.  */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR.  */
      if (TREE_CODE (source) == SAVE_EXPR)
	source = TREE_OPERAND (source, 0);

      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
	target = TREE_OPERAND (target, 0);

      if (source == target)
	return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build_call_nary (void_type_node,
			   build_address_of (soft_checkarraystore_node),
			   2, array, object);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}
1018
/* Makes sure that INDEXED_TYPE is appropriate.  If not, make it from
   ARRAY_NODE.  This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */
1024
1025 static tree
1026 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1027 tree indexed_type)
1028 {
1029 /* We used to check to see if ARRAY_NODE really had array type.
1030 However, with the new verifier, this is not necessary, as we know
1031 that the object will be an array of the appropriate type. */
1032
1033 return indexed_type;
1034 }
1035
1036 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1037 called with an integer code (the type of array to create), and the length
1038 of the array to create. */
1039
1040 tree
1041 build_newarray (int atype_value, tree length)
1042 {
1043 tree type_arg;
1044
1045 tree prim_type = decode_newarray_type (atype_value);
1046 tree type
1047 = build_java_array_type (prim_type,
1048 host_integerp (length, 0) == INTEGER_CST
1049 ? tree_low_cst (length, 0) : -1);
1050
1051 /* Pass a reference to the primitive type class and save the runtime
1052 some work. */
1053 type_arg = build_class_ref (prim_type);
1054
1055 return build_call_nary (promote_type (type),
1056 build_address_of (soft_newarray_node),
1057 2, type_arg, length);
1058 }
1059
1060 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1061 of the dimension. */
1062
1063 tree
1064 build_anewarray (tree class_type, tree length)
1065 {
1066 tree type
1067 = build_java_array_type (class_type,
1068 host_integerp (length, 0)
1069 ? tree_low_cst (length, 0) : -1);
1070
1071 return build_call_nary (promote_type (type),
1072 build_address_of (soft_anewarray_node),
1073 3,
1074 length,
1075 build_class_ref (class_type),
1076 null_pointer_node);
1077 }
1078
/* Return a node that evaluates 'new TYPE[LENGTH]'.  */
1080
1081 tree
1082 build_new_array (tree type, tree length)
1083 {
1084 if (JPRIMITIVE_TYPE_P (type))
1085 return build_newarray (encode_newarray_type (type), length);
1086 else
1087 return build_anewarray (TREE_TYPE (type), length);
1088 }
1089
1090 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1091 class pointer, a number of dimensions and the matching number of
1092 dimensions. The argument list is NULL terminated. */
1093
static void
expand_java_multianewarray (tree class_type, int ndim)
{
  int i;
  VEC(tree,gc) *args = NULL;

  /* Argument layout for _Jv_NewMultiArray: the class, the dimension
     count, NDIM dimension sizes, and a NULL terminator.  */
  VEC_safe_grow (tree, gc, args, 3 + ndim);

  VEC_replace (tree, args, 0, build_class_ref (class_type));
  VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));

  /* The sizes are on the stack with the last dimension on top, so
     fill the argument slots back to front as we pop.  */
  for(i = ndim - 1; i >= 0; i-- )
    VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));

  VEC_replace (tree, args, 2 + ndim, null_pointer_node);

  push_value (build_call_vec (promote_type (class_type),
			      build_address_of (soft_multianewarray_node),
			      args));
}
1114
/* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
   ARRAY is an array type.  May expand some bound checking and NULL
   pointer checking.  RHS_TYPE_NODE is the type of the value we are
   going to store.  In the case of CHAR/BYTE/BOOLEAN/SHORT, the type
   popped off the stack is an INT.  In those cases, we make the
   conversion.

   If ARRAY is a reference type, the assignment is checked at run-time
   to make sure that the RHS can be assigned to the array element
   type.  It is not necessary to generate this code if ARRAY is final.  */
1124
static void
expand_java_arraystore (tree rhs_type_node)
{
  /* Sub-int values are represented as int on the JVM stack, so pop an
     int for CHAR/BYTE/BOOLEAN/SHORT stores.  */
  tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
			      && TYPE_PRECISION (rhs_type_node) <= 32) ?
			     int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  /* INDEX and ARRAY are each used more than once below; evaluate
     them exactly once.  */
  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  /* Reference-typed stores need the runtime assignability check.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  /* Finally, store through the stashed address.  */
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}
1180
1181 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1182 sure that LHS is an array type. May expand some bound checking and NULL
1183 pointer checking.
1184 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1185 BOOLEAN/SHORT, we push a promoted type back to the stack.
1186 */
1187
static void
expand_java_arrayload (tree lhs_type_node)
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_type;
  tree array_node;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      lhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (lhs_type_node, -1);
  array_node = pop_value (array_type);
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);

  /* Both operands may be referenced more than once by the access
     expression; evaluate each exactly once.  */
  index_node = save_expr (index_node);
  array_node = save_expr (array_node);

  lhs_type_node = build_java_check_indexed_type (array_node,
						 lhs_type_node);
  load_node = build_java_arrayaccess (array_node,
				      lhs_type_node,
				      index_node);
  /* Sub-int element types are widened to int on the JVM stack.  */
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
  push_value (load_node);
}
1220
/* Expands .length.  Makes sure that we deal with an array and may
   expand a NULL check on the array object.  */
1223
1224 static void
1225 expand_java_array_length (void)
1226 {
1227 tree array = pop_value (ptr_type_node);
1228 tree length = build_java_array_length_access (array);
1229
1230 push_value (length);
1231 }
1232
1233 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1234 either soft_monitorenter_node or soft_monitorexit_node. */
1235
1236 static tree
1237 build_java_monitor (tree call, tree object)
1238 {
1239 return build_call_nary (void_type_node,
1240 build_address_of (call),
1241 1, object);
1242 }
1243
1244 /* Emit code for one of the PUSHC instructions. */
1245
1246 static void
1247 expand_java_pushc (int ival, tree type)
1248 {
1249 tree value;
1250 if (type == ptr_type_node && ival == 0)
1251 value = null_pointer_node;
1252 else if (type == int_type_node || type == long_type_node)
1253 value = build_int_cst (type, ival);
1254 else if (type == float_type_node || type == double_type_node)
1255 {
1256 REAL_VALUE_TYPE x;
1257 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1258 value = build_real (type, x);
1259 }
1260 else
1261 gcc_unreachable ();
1262
1263 push_value (value);
1264 }
1265
1266 static void
1267 expand_java_return (tree type)
1268 {
1269 if (type == void_type_node)
1270 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1271 else
1272 {
1273 tree retval = pop_value (type);
1274 tree res = DECL_RESULT (current_function_decl);
1275 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1276
1277 /* Handle the situation where the native integer type is smaller
1278 than the JVM integer. It can happen for many cross compilers.
1279 The whole if expression just goes away if INT_TYPE_SIZE < 32
1280 is false. */
1281 if (INT_TYPE_SIZE < 32
1282 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1283 < GET_MODE_SIZE (TYPE_MODE (type))))
1284 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1285
1286 TREE_SIDE_EFFECTS (retval) = 1;
1287 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1288 }
1289 }
1290
1291 static void
1292 expand_load_internal (int index, tree type, int pc)
1293 {
1294 tree copy;
1295 tree var = find_local_variable (index, type, pc);
1296
1297 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1298 on the stack. If there is an assignment to this VAR_DECL between
1299 the stack push and the use, then the wrong code could be
1300 generated. To avoid this we create a new local and copy our
1301 value into it. Then we push this new local on the stack.
1302 Hopefully this all gets optimized out. */
1303 copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
1304 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1305 && TREE_TYPE (copy) != TREE_TYPE (var))
1306 var = convert (type, var);
1307 java_add_local_var (copy);
1308 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1309
1310 push_value (copy);
1311 }
1312
1313 tree
1314 build_address_of (tree value)
1315 {
1316 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1317 }
1318
1319 bool
1320 class_has_finalize_method (tree type)
1321 {
1322 tree super = CLASSTYPE_SUPER (type);
1323
1324 if (super == NULL_TREE)
1325 return false; /* Every class with a real finalizer inherits */
1326 /* from java.lang.Object. */
1327 else
1328 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1329 }
1330
1331 tree
1332 java_create_object (tree type)
1333 {
1334 tree alloc_node = (class_has_finalize_method (type)
1335 ? alloc_object_node
1336 : alloc_no_finalizer_node);
1337
1338 return build_call_nary (promote_type (type),
1339 build_address_of (alloc_node),
1340 1, build_class_ref (type));
1341 }
1342
1343 static void
1344 expand_java_NEW (tree type)
1345 {
1346 tree alloc_node;
1347
1348 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1349 : alloc_no_finalizer_node);
1350 if (! CLASS_LOADED_P (type))
1351 load_class (type, 1);
1352 safe_layout_class (type);
1353 push_value (build_call_nary (promote_type (type),
1354 build_address_of (alloc_node),
1355 1, build_class_ref (type)));
1356 }
1357
1358 /* This returns an expression which will extract the class of an
1359 object. */
1360
tree
build_get_class (tree value)
{
  /* Build VALUE->vtable->class: the runtime class of the object that
     VALUE refers to.  */
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  /* VALUE->vtable, with an optional null-reference check.  */
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
		     build_java_indirect_ref (object_type_node, value,
					      flag_check_references),
		     vtable_field, NULL_TREE);
  /* (*vtable).class  */
  return build3 (COMPONENT_REF, class_ptr_type,
		 build1 (INDIRECT_REF, dtable_type, tmp),
		 class_field, NULL_TREE);
}
1375
1376 /* This builds the tree representation of the `instanceof' operator.
1377 It tries various tricks to optimize this in cases where types are
1378 known. */
1379
tree
build_instanceof (tree value, tree type)
{
  tree expr;
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      /* TYPE is final: a non-null VALUE is an instance exactly when
	 its runtime class is TYPE itself.  */
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      /* General case: defer to the runtime instanceof helper.  */
      expr = build_call_nary (itype,
			      build_address_of (soft_instanceof_node),
			      2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
1441
1442 static void
1443 expand_java_INSTANCEOF (tree type)
1444 {
1445 tree value = pop_value (object_ptr_type_node);
1446 value = build_instanceof (value, type);
1447 push_value (value);
1448 }
1449
1450 static void
1451 expand_java_CHECKCAST (tree type)
1452 {
1453 tree value = pop_value (ptr_type_node);
1454 value = build_call_nary (promote_type (type),
1455 build_address_of (soft_checkcast_node),
1456 2, build_class_ref (type), value);
1457 push_value (value);
1458 }
1459
1460 static void
1461 expand_iinc (unsigned int local_var_index, int ival, int pc)
1462 {
1463 tree local_var, res;
1464 tree constant_value;
1465
1466 flush_quick_stack ();
1467 local_var = find_local_variable (local_var_index, int_type_node, pc);
1468 constant_value = build_int_cst (NULL_TREE, ival);
1469 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1470 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1471 }
1472
1473
1474 tree
1475 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1476 {
1477 tree call = NULL;
1478 tree arg1 = convert (type, op1);
1479 tree arg2 = convert (type, op2);
1480
1481 if (type == int_type_node)
1482 {
1483 switch (op)
1484 {
1485 case TRUNC_DIV_EXPR:
1486 call = soft_idiv_node;
1487 break;
1488 case TRUNC_MOD_EXPR:
1489 call = soft_irem_node;
1490 break;
1491 default:
1492 break;
1493 }
1494 }
1495 else if (type == long_type_node)
1496 {
1497 switch (op)
1498 {
1499 case TRUNC_DIV_EXPR:
1500 call = soft_ldiv_node;
1501 break;
1502 case TRUNC_MOD_EXPR:
1503 call = soft_lrem_node;
1504 break;
1505 default:
1506 break;
1507 }
1508 }
1509
1510 gcc_assert (call);
1511 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1512 return call;
1513 }
1514
tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	/* Unsigned (logical) right shift: shift in an unsigned copy
	   of TYPE, then convert back.  */
	tree u_type = unsigned_type_for (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* Mask the shift count with TYPE_PRECISION - 1, as the JVM
	 only uses the low-order bits of the count.  */
      mask = build_int_cst (NULL_TREE,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      /* Both operands are used twice; evaluate each once.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      /* arg1 < arg2 ? -1 : arg1 > arg2 ? 1 : 0.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  /* Floating-point remainder goes through the runtime fmod
	     helper, which operates in double precision.  */
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build_call_nary (double_type_node,
				  build_address_of (soft_fmod_node),
				  2, arg1, arg2);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      /* Integer div/rem may go through a software helper when
	 -fuse-divide-subroutine is in effect.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}
1592
1593 static void
1594 expand_java_binop (tree type, enum tree_code op)
1595 {
1596 tree larg, rarg;
1597 tree ltype = type;
1598 tree rtype = type;
1599 switch (op)
1600 {
1601 case LSHIFT_EXPR:
1602 case RSHIFT_EXPR:
1603 case URSHIFT_EXPR:
1604 rtype = int_type_node;
1605 rarg = pop_value (rtype);
1606 break;
1607 default:
1608 rarg = pop_value (rtype);
1609 }
1610 larg = pop_value (ltype);
1611 push_value (build_java_binop (op, type, larg, rarg));
1612 }
1613
1614 /* Lookup the field named NAME in *TYPEP or its super classes.
1615 If not found, return NULL_TREE.
1616 (If the *TYPEP is not found, or if the field reference is
1617 ambiguous, return error_mark_node.)
1618 If found, return the FIELD_DECL, and set *TYPEP to the
1619 class containing the field. */
1620
tree
lookup_field (tree *typep, tree name)
{
  /* Make sure the class is loaded and laid out before walking its
     fields.  */
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      /* A field declared directly in the current class wins.  */
      for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces. */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      /* The same field may be reachable through more than one
		 interface; that is not ambiguous.  */
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  /* Two distinct fields with the same name from
		     different interfaces: the reference is ambiguous.  */
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      /* Not found here: continue with the superclass.  */
      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}
1673
1674 /* Look up the field named NAME in object SELF_VALUE,
1675 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1676 SELF_VALUE is NULL_TREE if looking for a static field. */
1677
tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      /* Static field: no object reference involved.  */
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  /* Indirect dispatch: the field offset is not known at
	     compile time; fetch it from the otable at runtime and
	     compute the field address by pointer arithmetic.  */
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  /* For fields of other classes, a zero otable entry means
	     the field was not found: call the runtime's nosuchfield
	     handler instead of dereferencing a bogus offset.  */
	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build_call_nary (void_type_node,
					 build_address_of (soft_nosuchfield_node),
					 1, otable_index),
			field_offset);

	  field_offset = fold (convert (sizetype, field_offset));
	  self_value = java_check_reference (self_value, check);
	  address
	    = fold_build2 (POINTER_PLUS_EXPR,
			   TREE_TYPE (self_value),
			   self_value, field_offset);
	  address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
				  address);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      /* Direct dispatch: an ordinary COMPONENT_REF through the
	 (possibly checked) object reference.  */
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}
1758
1759 tree
1760 lookup_label (int pc)
1761 {
1762 tree name;
1763 char buf[32];
1764 if (pc > highest_label_pc_this_method)
1765 highest_label_pc_this_method = pc;
1766 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1767 name = get_identifier (buf);
1768 if (IDENTIFIER_LOCAL_VALUE (name))
1769 return IDENTIFIER_LOCAL_VALUE (name);
1770 else
1771 {
1772 /* The type of the address of a label is return_address_type_node. */
1773 tree decl = create_label_decl (name);
1774 return pushdecl (decl);
1775 }
1776 }
1777
1778 /* Generate a unique name for the purpose of loops and switches
1779 labels, and try-catch-finally blocks label or temporary variables. */
1780
1781 tree
1782 generate_name (void)
1783 {
1784 static int l_number = 0;
1785 char buff [32];
1786 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1787 l_number++;
1788 return get_identifier (buff);
1789 }
1790
1791 tree
1792 create_label_decl (tree name)
1793 {
1794 tree decl;
1795 decl = build_decl (input_location, LABEL_DECL, name,
1796 TREE_TYPE (return_address_type_node));
1797 DECL_CONTEXT (decl) = current_function_decl;
1798 DECL_IGNORED_P (decl) = 1;
1799 return decl;
1800 }
1801
/* This maps a bytecode offset (PC) to various flags (the BCODE_*
   bits; e.g. note_label sets BCODE_JUMP_TARGET here).  */
char *instruction_bits;

/* This is a vector of type states for the current method.  It is
   indexed by PC.  Each element is a tree vector holding the type
   state at that PC.  We only note type states at basic block
   boundaries.  */
VEC(tree, gc) *type_states;
1810
1811 static void
1812 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1813 {
1814 lookup_label (target_pc);
1815 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1816 }
1817
1818 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1819 where CONDITION is one of one the compare operators. */
1820
1821 static void
1822 expand_compare (enum tree_code condition, tree value1, tree value2,
1823 int target_pc)
1824 {
1825 tree target = lookup_label (target_pc);
1826 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1827 java_add_stmt
1828 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1829 build1 (GOTO_EXPR, void_type_node, target),
1830 build_java_empty_stmt ()));
1831 }
1832
1833 /* Emit code for a TEST-type opcode. */
1834
1835 static void
1836 expand_test (enum tree_code condition, tree type, int target_pc)
1837 {
1838 tree value1, value2;
1839 flush_quick_stack ();
1840 value1 = pop_value (type);
1841 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1842 expand_compare (condition, value1, value2, target_pc);
1843 }
1844
1845 /* Emit code for a COND-type opcode. */
1846
1847 static void
1848 expand_cond (enum tree_code condition, tree type, int target_pc)
1849 {
1850 tree value1, value2;
1851 flush_quick_stack ();
1852 /* note: pop values in opposite order */
1853 value2 = pop_value (type);
1854 value1 = pop_value (type);
1855 /* Maybe should check value1 and value2 for type compatibility ??? */
1856 expand_compare (condition, value1, value2, target_pc);
1857 }
1858
1859 static void
1860 expand_java_goto (int target_pc)
1861 {
1862 tree target_label = lookup_label (target_pc);
1863 flush_quick_stack ();
1864 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1865 }
1866
static tree
expand_java_switch (tree selector, int default_pc)
{
  tree switch_expr, x;

  flush_quick_stack ();
  /* Build a SWITCH_EXPR with an initially empty body; cases are
     appended later (see expand_java_add_case).  */
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
			NULL_TREE, NULL_TREE);
  java_add_stmt (switch_expr);

  /* Append the default case: a label with no case value followed by a
     jump to DEFAULT_PC.  */
  x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
	      create_artificial_label (input_location));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  return switch_expr;
}
1886
1887 static void
1888 expand_java_add_case (tree switch_expr, int match, int target_pc)
1889 {
1890 tree value, x;
1891
1892 value = build_int_cst (TREE_TYPE (switch_expr), match);
1893
1894 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1895 create_artificial_label (input_location));
1896 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1897
1898 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1899 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1900 }
1901
1902 static VEC(tree,gc) *
1903 pop_arguments (tree method_type)
1904 {
1905 function_args_iterator fnai;
1906 tree type;
1907 VEC(tree,gc) *args = NULL;
1908 int arity;
1909
1910 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1911 {
1912 /* XXX: leaky abstraction. */
1913 if (type == void_type_node)
1914 break;
1915
1916 VEC_safe_push (tree, gc, args, type);
1917 }
1918
1919 arity = VEC_length (tree, args);
1920
1921 while (arity--)
1922 {
1923 tree arg = pop_value (VEC_index (tree, args, arity));
1924
1925 /* We simply cast each argument to its proper type. This is
1926 needed since we lose type information coming out of the
1927 verifier. We also have to do this when we pop an integer
1928 type that must be promoted for the function call. */
1929 if (TREE_CODE (type) == POINTER_TYPE)
1930 arg = build1 (NOP_EXPR, type, arg);
1931 else if (targetm.calls.promote_prototypes (type)
1932 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1933 && INTEGRAL_TYPE_P (type))
1934 arg = convert (integer_type_node, arg);
1935
1936 VEC_replace (tree, args, arity, arg);
1937 }
1938
1939 return args;
1940 }
1941
1942 /* Attach to PTR (a block) the declaration found in ENTRY. */
1943
int
attach_init_test_initialization_flags (void **entry, void *ptr)
{
  /* Hash-table traversal callback: PTR is the function body block;
     ENTRY holds a class-initialization flag decl (ite->value) that
     must be declared inside that block.  Always returns true so the
     traversal continues.  */
  tree block = (tree)ptr;
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;

  if (block != error_mark_node)
    {
      if (TREE_CODE (block) == BIND_EXPR)
        {
	  /* Chain the flag decl onto the BIND_EXPR's variable list
	     and prepend a DECL_EXPR to its body.  */
	  tree body = BIND_EXPR_BODY (block);
	  DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
	  BIND_EXPR_VARS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BIND_EXPR_BODY (block) = body;
	}
      else
	{
	  /* Same treatment for a BLOCK node.  */
	  tree body = BLOCK_SUBBLOCKS (block);
	  TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
	  BLOCK_EXPR_DECLS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BLOCK_SUBBLOCKS (block) = body;
	}

    }
  return true;
}
1974
1975 /* Build an expression to initialize the class CLAS.
1976 if EXPR is non-NULL, returns an expression to first call the initializer
1977 (if it is needed) and then calls EXPR. */
1978
tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      /* Unconditionally call the runtime initializer.  */
      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      /* Guard the runtime call with a per-function boolean flag so
	 each class is initialized at most once per function.  */
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations. */
	  decl = build_decl (input_location, VAR_DECL, NULL_TREE,
			     boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
	  DECL_CONTEXT (decl) = current_function_decl;
	  DECL_INITIAL (decl) = boolean_false_node;
	  /* Don't emit any symbolic debugging info for this decl.  */
	  DECL_IGNORED_P (decl) = 1;
	  *init_test_decl = decl;
	}

      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
      /* if (!flag) init_class (clas);  */
      init = build3 (COND_EXPR, void_type_node,
		     build2 (EQ_EXPR, boolean_type_node,
			     *init_test_decl, boolean_false_node),
		     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      /* ...; flag = true;
	 NOTE(review): TREE_TYPE (expr) is used here even though EXPR
	 may be NULL_TREE on this path -- confirm that callers always
	 pass a non-null EXPR when always_initialize_class_p is
	 false.  */
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
		     build2 (MODIFY_EXPR, boolean_type_node,
			     *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      /* Sequence the initialization before EXPR.  */
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}
2044
2045 \f
2046
2047 /* Rewrite expensive calls that require stack unwinding at runtime to
2048 cheaper alternatives. The logic here performs these
2049 transformations:
2050
2051 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2052 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2053
2054 */
2055
/* A single method-call rewrite.  See the table `rules' below; the
   fields describe the method to match and its replacement.  */
typedef struct
{
  const char *classname;	/* Class declaring the method to rewrite.  */
  const char *method;		/* Name of that method.  */
  const char *signature;	/* Its original Java signature.  */
  const char *new_classname;	/* Class declaring the replacement.  */
  const char *new_signature;	/* Signature of the replacement.  */
  int flags;			/* ACC_* access flags used when the
				   replacement decl is created lazily.  */
  /* Hook that appends any extra trailing argument, or NULL.  */
  void (*rewrite_arglist) (VEC(tree,gc) **);
} rewrite_rule;
2066
2067 /* Add __builtin_return_address(0) to the end of an arglist. */
2068
2069
2070 static void
2071 rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
2072 {
2073 tree retaddr
2074 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2075 1, integer_zero_node);
2076
2077 DECL_UNINLINABLE (current_function_decl) = 1;
2078
2079 VEC_safe_push (tree, gc, *arglist, retaddr);
2080 }
2081
/* Add this.class to the end of an arglist.  */

static void
rewrite_arglist_getclass (VEC(tree,gc) **arglist)
{
  /* Append a reference to the class currently being output, so the
     callee receives the caller's class without stack unwinding.  */
  VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
}
2089
/* The rewrite table proper.  Each entry maps an expensive library
   method to a cheaper private replacement; the list is terminated by
   an all-NULL/zero sentinel entry.  */
static rewrite_rule rules[] =
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},

   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},

   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
    "java.lang.String", "([CII)Ljava/lang/String;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},

   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2113
2114 /* True if this method is special, i.e. it's a private method that
2115 should be exported from a DSO. */
2116
2117 bool
2118 special_method_p (tree candidate_method)
2119 {
2120 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2121 tree method = DECL_NAME (candidate_method);
2122 rewrite_rule *p;
2123
2124 for (p = rules; p->classname; p++)
2125 {
2126 if (get_identifier (p->classname) == context
2127 && get_identifier (p->method) == method)
2128 return true;
2129 }
2130 return false;
2131 }
2132
2133 /* Scan the rules list for replacements for *METHOD_P and replace the
2134 args accordingly. If the rewrite results in an access to a private
2135 method, update SPECIAL.*/
2136
2137 void
2138 maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
2139 tree *method_signature_p, tree *special)
2140 {
2141 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2142 rewrite_rule *p;
2143 *special = NULL_TREE;
2144
2145 for (p = rules; p->classname; p++)
2146 {
2147 if (get_identifier (p->classname) == context)
2148 {
2149 tree method = DECL_NAME (*method_p);
2150 if (get_identifier (p->method) == method
2151 && get_identifier (p->signature) == *method_signature_p)
2152 {
2153 tree maybe_method;
2154 tree destination_class
2155 = lookup_class (get_identifier (p->new_classname));
2156 gcc_assert (destination_class);
2157 maybe_method
2158 = lookup_java_method (destination_class,
2159 method,
2160 get_identifier (p->new_signature));
2161 if (! maybe_method && ! flag_verify_invocations)
2162 {
2163 maybe_method
2164 = add_method (destination_class, p->flags,
2165 method, get_identifier (p->new_signature));
2166 DECL_EXTERNAL (maybe_method) = 1;
2167 }
2168 *method_p = maybe_method;
2169 gcc_assert (*method_p);
2170 if (p->rewrite_arglist)
2171 p->rewrite_arglist (arg_list_p);
2172 *method_signature_p = get_identifier (p->new_signature);
2173 *special = integer_one_node;
2174
2175 break;
2176 }
2177 }
2178 }
2179 }
2180
2181 \f
2182
/* Build a reference to the code of METHOD as invoked on SELF_TYPE.
   If SELF_TYPE has been compiled, take the method's address directly
   (or via the atable under indirect dispatch); otherwise emit code
   that fetches SELF_TYPE->methods[METHOD_INDEX].ncode at runtime.
   SPECIAL is passed through to get_symbol_table_index.  */

tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
			tree self_type, tree method_signature ATTRIBUTE_UNUSED,
			VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
	 publicly visible methods or gcc will use PLT indirections
	 to reach them.  We also have to use indirect dispatch for all
	 external methods.  */
      if (! flag_indirect_dispatch
	  || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
	{
	  /* Direct call: just take the method's address.  */
	  func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
			 method);
	}
      else
	{
	  /* Indirect dispatch: load the target from this class's
	     atable slot for METHOD.  */
	  tree table_index
	    = build_int_cst (NULL_TREE,
			     (get_symbol_table_index
			      (method, special,
			       &TYPE_ATABLE_METHODS (output_class))));
	  func
	    = build4 (ARRAY_REF,
		      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
		      TYPE_ATABLE_DECL (output_class), table_index,
		      NULL_TREE, NULL_TREE);
	}
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
	 Compile this code to get a reference to the method's code:

	 SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
	 we have to use its class context, not the caller's notion of
	 where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
	ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
	methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
		    lookup_field (&class_type_node, methods_ident),
		    NULL_TREE);
      /* Find METHOD's position in the class's method list; the walk
	 must terminate via the break, otherwise METHOD isn't in the
	 class at all and we abort.  */
      for (meth = TYPE_METHODS (self_type);
	   ; meth = DECL_CHAIN (meth))
	{
	  if (method == meth)
	    break;
	  if (meth == NULL_TREE)
	    fatal_error ("method '%s' not found in class",
			 IDENTIFIER_POINTER (DECL_NAME (method)));
	  method_index++;
	}
      /* Convert the index into a byte offset into the methods array.  */
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
			 ref, size_int (method_index));
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
		     ref, lookup_field (&method_type_node, ncode_ident),
		     NULL_TREE);
    }
  return func;
}
2261
2262 tree
2263 invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
2264 {
2265 tree dtable, objectref;
2266 tree saved = save_expr (VEC_index (tree, arg_list, 0));
2267
2268 VEC_replace (tree, arg_list, 0, saved);
2269
2270 /* If we're dealing with interfaces and if the objectref
2271 argument is an array then get the dispatch table of the class
2272 Object rather than the one from the objectref. */
2273 objectref = (is_invoke_interface
2274 && is_array_type_p (TREE_TYPE (saved))
2275 ? build_class_ref (object_type_node) : saved);
2276
2277 if (dtable_ident == NULL_TREE)
2278 dtable_ident = get_identifier ("vtable");
2279 dtable = build_java_indirect_ref (object_type_node, objectref,
2280 flag_check_references);
2281 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2282 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2283
2284 return dtable;
2285 }
2286
/* Determine the index in SYMBOL_TABLE for a reference to the decl
   T.  If this decl has not been seen before, it will be added to the
   [oa]table_methods.  If it has, the existing table slot will be
   reused.  The returned index is 1-based.  */

int
get_symbol_table_index (tree t, tree special,
			VEC(method_entry,gc) **symbol_table)
{
  method_entry *e;
  unsigned i;

  /* Look for an existing entry with the same method/special pair.  */
  FOR_EACH_VEC_ELT (method_entry, *symbol_table, i, e)
    if (t == e->method && special == e->special)
      goto done;

  /* Not found.  When the loop above completes without the goto, I
     equals the vector length, so the entry pushed here lands at
     index I.  */
  e = VEC_safe_push (method_entry, gc, *symbol_table, NULL);
  e->method = t;
  e->special = special;

 done:
  /* Return a 1-based index in both the found and newly-added cases.  */
  return i + 1;
}
2310
/* Build an expression for the target function of an invokevirtual
   dispatch on METHOD, given the receiver's vtable DTABLE.  SPECIAL is
   passed through to get_symbol_table_index under indirect dispatch.  */

tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      /* Indirect dispatch: the vtable offset is loaded at runtime
	 from this class's otable.  */
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      otable_index
	= build_int_cst (NULL_TREE, get_symbol_table_index
			 (method, special,
			  &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
			     TYPE_OTABLE_DECL (output_class),
			     otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
	 using get_method_index().  DECL_VINDEX is the true offset
	 from the vtable base to a method, regardless of any extra
	 words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      /* Scale the slot number into a byte offset.  */
      method_index = size_binop (MULT_EXPR, method_index,
				 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      if (TARGET_VTABLE_USES_DESCRIPTORS)
	method_index = size_binop (MULT_EXPR, method_index,
				   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
		      convert (sizetype, method_index));

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    /* The vtable slot itself is a function descriptor; use its
       address directly rather than loading through it.  */
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    {
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
    }

  return func;
}
2359
2360 static GTY(()) tree class_ident;
/* Build the dispatch expression for an invokeinterface call on METHOD
   through the receiver's vtable DTABLE.  The actual target is resolved
   at run time by a library helper.  */
tree
build_invokeinterface (tree dtable, tree method)
{
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  /* Fetch the receiver's class object out of its vtable.  */
  dtable = build_java_indirect_ref (dtable_type, dtable,
				    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
		   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      /* Itable entries are used in pairs: slot ITABLE_INDEX-1 holds
	 the interface class and slot ITABLE_INDEX the method index.  */
      int itable_index
	= 2 * (get_symbol_table_index
	       (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index-1),
		  NULL_TREE, NULL_TREE);
      idx
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index),
		  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      idx = build_int_cst (NULL_TREE,
			   get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  /* Call the runtime's interface-method lookup helper with the
     receiver's class, the interface and the method index.  */
  return build_call_nary (ptr_type_node,
			  build_address_of (soft_lookupinterfacemethod_node),
			  3, dtable, interface, idx);
}
2412
/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified.
   Arguments are popped from the quick stack; the call's result (if
   non-void) is pushed back.  On an unresolvable or inconsistent method
   reference an error is emitted and a typed zero is pushed instead.  */

static void
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
{
  tree method_signature
    = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
					 method_ref_index);
  tree self_type
    = get_class_constant (current_jcf,
			  COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
						    method_ref_index));
  const char *const self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree call, func, method, method_type;
  VEC(tree,gc) *arg_list;
  tree check = NULL_TREE;

  tree special = NULL_TREE;

  /* Make sure the receiver class is loaded and laid out before we
     search it for the method.  */
  if (! CLASS_LOADED_P (self_type))
    {
      load_class (self_type, 1);
      safe_layout_class (self_type);
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
	fatal_error ("failed to find class '%s'", self_name);
    }
  layout_class_methods (self_type);

  if (ID_INIT_P (method_name))
    method = lookup_java_constructor (self_type, method_signature);
  else
    method = lookup_java_method (self_type, method_name, method_signature);

  /* We've found a method in a class other than the one in which it
     was wanted.  This can happen if, for instance, we're trying to
     compile invokespecial super.equals().
     FIXME: This is a kludge.  Rather than nullifying the result, we
     should change lookup_java_method() so that it doesn't search the
     superclass chain when we're BC-compiling.  */
  if (! flag_verify_invocations
      && method
      && ! TYPE_ARRAY_P (self_type)
      && self_type != DECL_CONTEXT (method))
    method = NULL_TREE;

  /* We've found a method in an interface, but this isn't an interface
     call.  */
  if (opcode != OPCODE_invokeinterface
      && method
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
    method = NULL_TREE;

  /* We've found a non-interface method but we are making an
     interface call.  This can happen if the interface overrides a
     method in Object.  */
  if (! flag_verify_invocations
      && opcode == OPCODE_invokeinterface
      && method
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
    method = NULL_TREE;

  if (method == NULL_TREE)
    {
      if (flag_verify_invocations || ! flag_indirect_dispatch)
	{
	  error ("class '%s' has no method named '%s' matching signature '%s'",
		 self_name,
		 IDENTIFIER_POINTER (method_name),
		 IDENTIFIER_POINTER (method_signature));
	}
      else
	{
	  /* BC compilation without verification: fabricate a dummy
	     method decl so compilation can continue; it is resolved
	     at run time.  */
	  int flags = ACC_PUBLIC;
	  if (opcode == OPCODE_invokestatic)
	    flags |= ACC_STATIC;
	  if (opcode == OPCODE_invokeinterface)
	    {
	      flags |= ACC_INTERFACE | ACC_ABSTRACT;
	      CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
	    }
	  method = add_method (self_type, flags, method_name,
			       method_signature);
	  DECL_ARTIFICIAL (method) = 1;
	  METHOD_DUMMY (method) = 1;
	  layout_class_method (self_type, NULL,
			       method, NULL);
	}
    }

  /* Invoke static can't invoke static/abstract method */
  if (method != NULL_TREE)
    {
      if (opcode == OPCODE_invokestatic)
	{
	  if (!METHOD_STATIC (method))
	    {
	      error ("invokestatic on non static method");
	      method = NULL_TREE;
	    }
	  else if (METHOD_ABSTRACT (method))
	    {
	      error ("invokestatic on abstract method");
	      method = NULL_TREE;
	    }
	}
      else
	{
	  if (METHOD_STATIC (method))
	    {
	      error ("invoke[non-static] on static method");
	      method = NULL_TREE;
	    }
	}
    }

  if (method == NULL_TREE)
    {
      /* If we got here, we emitted an error message above.  So we
	 just pop the arguments, push a properly-typed zero, and
	 continue.  */
      method_type = get_type_from_signature (method_signature);
      pop_arguments (method_type);
      if (opcode != OPCODE_invokestatic)
	pop_type (self_type);
      method_type = promote_type (TREE_TYPE (method_type));
      push_value (convert (method_type, integer_zero_node));
      return;
    }

  method_type = TREE_TYPE (method);
  arg_list = pop_arguments (method_type);
  flush_quick_stack ();

  /* Possibly replace the call with a cheaper equivalent (see the
     rewrite rules table above).  */
  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
			    &special);

  func = NULL_TREE;
  if (opcode == OPCODE_invokestatic)
    func = build_known_method_ref (method, method_type, self_type,
				   method_signature, arg_list, special);
  else if (opcode == OPCODE_invokespecial
	   || (opcode == OPCODE_invokevirtual
	       && (METHOD_PRIVATE (method)
		   || METHOD_FINAL (method)
		   || CLASS_FINAL (TYPE_NAME (self_type)))))
    {
      /* If the object for the method call is null, we throw an
	 exception.  We don't do this if the object is the current
	 method's `this'.  In other cases we just rely on an
	 optimization pass to eliminate redundant checks.  FIXME:
	 Unfortunately there doesn't seem to be a way to determine
	 what the current method is right now.
	 We do omit the check if we're calling <init>.  */
      /* We use a SAVE_EXPR here to make sure we only evaluate
	 the new `self' expression once.  */
      tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
      VEC_replace (tree, arg_list, 0, save_arg);
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
      func = build_known_method_ref (method, method_type, self_type,
				     method_signature, arg_list, special);
    }
  else
    {
      /* True virtual or interface dispatch through the vtable.  */
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
					 arg_list);
      if (opcode == OPCODE_invokevirtual)
	func = build_invokevirtual (dtable, method, special);
      else
	func = build_invokeinterface (dtable, method);
    }

  if (TREE_CODE (func) == ADDR_EXPR)
    TREE_TYPE (func) = build_pointer_type (method_type);
  else
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);

  call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
  TREE_SIDE_EFFECTS (call) = 1;
  call = check_for_builtin (method, call);

  /* Sequence the null check (if any) before the call itself.  */
  if (check != NULL_TREE)
    {
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
      TREE_SIDE_EFFECTS (call) = 1;
    }

  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
    java_add_stmt (call);
  else
    {
      push_value (call);
      flush_quick_stack ();
    }
}
2612
/* Create a stub which will be put into the vtable but which will call
   a JNI function.  Returns a BIND_EXPR implementing the stub body:
   acquire a JNI frame, look up and call the native function, unwrap
   the result if needed, pop the frame, and return.  */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, body, method_sig, arg_types;
  tree jniarg0, jniarg1, jniarg2, jniarg3;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args;
  tree meth_var;
  tree bind;
  VEC(tree,gc) *args = NULL;
  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  /* Local holding the JNIEnv pointer for this frame.  */
  env_var = build_decl (input_location,
			VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  /* Local holding the native call's result, only for non-void
     methods.  */
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
			    TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      DECL_CHAIN (env_var) = res_var;
    }

  method_args = DECL_ARGUMENTS (method);
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;
  TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
		 build_call_nary (ptr_type_node,
				  build_address_of (soft_getjnienvnewframe_node),
				  1, klass));

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  VEC_safe_push (tree, gc, args, env_var);

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      VEC_safe_push (tree, gc, args, klass);
    }

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
    {
      /* ARGS_SIZE accumulates the total argument bytes, rounding each
	 argument up to the parameter-passing boundary when the target
	 defines one.  */
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
                  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      VEC_safe_push (tree, gc, args, tem);
    }
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* Argument types for static methods and the JNIEnv structure.
     FIXME: Write and use build_function_type_vec to avoid this.  */
  if (METHOD_STATIC (method))
    arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  jniarg0 = klass;
  jniarg1 = build_utf8_ref (DECL_NAME (method));
  jniarg2 = build_utf8_ref (unmangle_classname
			    (IDENTIFIER_POINTER (method_sig),
			     IDENTIFIER_LENGTH (method_sig)));
  jniarg3 = build_int_cst (NULL_TREE, args_size);

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  /* Use the actual function type, rather than a generic pointer type,
     such that this decl keeps the actual pointer type from being
     garbage-collected.  If it is, we end up using canonical types
     with different uids for equivalent function types, and this in
     turn causes utf8 identifiers and output order to vary.  */
  meth_var = build_decl (input_location,
			 VAR_DECL, get_identifier ("meth"), jni_func_type);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* meth != NULL ? meth : (meth = lookup(...)) — cache the looked-up
     function pointer in the static `meth' variable.  */
  jnifunc = build3 (COND_EXPR, jni_func_type,
		    build2 (NE_EXPR, boolean_type_node,
			    meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
		    meth_var,
		    build2 (MODIFY_EXPR, jni_func_type, meth_var,
			    build1
			    (NOP_EXPR, jni_func_type,
			     build_call_nary (ptr_type_node,
					      build_address_of
					      (soft_lookupjnimethod_node),
					      4,
					      jniarg0, jniarg1,
					      jniarg2, jniarg3))));

  /* Now we make the actual JNI call via the resulting function
     pointer.  */
  call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
	 reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
	call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
				build_address_of (soft_unwrapjni_node),
				1, call);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
		     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build_call_nary (ptr_type_node,
			  build_address_of (soft_jnipopsystemframe_node),
			  1, env_var);
  TREE_SIDE_EFFECTS (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
	 result.  We use the type of the DECL_RESULT because it
	 might be different from the return type of the function:
	 it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
	res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
		 build1 (RETURN_EXPR, void_type_node, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  /* Prepend class initialization for static methods reachable from
     other classes.  */
  if (METHOD_STATIC (method)
      && (! METHOD_PRIVATE (method)
	  || INNER_CLASS_P (DECL_CONTEXT (method))))
    {
      tree init = build_call_expr (soft_initclass_node, 1,
				   klass);
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
      TREE_SIDE_EFFECTS (body) = 1;
    }

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
		 body, block);
  return bind;
}
2812
2813
2814 /* Given lvalue EXP, return a volatile expression that references the
2815 same object. */
2816
2817 tree
2818 java_modify_addr_for_volatile (tree exp)
2819 {
2820 tree exp_type = TREE_TYPE (exp);
2821 tree v_type
2822 = build_qualified_type (exp_type,
2823 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2824 tree addr = build_fold_addr_expr (exp);
2825 v_type = build_pointer_type (v_type);
2826 addr = fold_convert (v_type, addr);
2827 exp = build_fold_indirect_ref (addr);
2828 return exp;
2829 }
2830
2831
/* Expand an operation to extract from or store into a field.
   IS_STATIC is 1 iff the field is static.
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
   FIELD_REF_INDEX is an index into the constant pool.
   Operands come from / results go to the quick stack; volatile fields
   get an additional full memory barrier around the access.  */

static void
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
{
  tree self_type
    = get_class_constant (current_jcf,
			  COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
						     field_ref_index));
  const char *self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
						  field_ref_index);
  tree field_type = get_type_from_signature (field_signature);
  /* For a put, the new value is on top of the stack and must be popped
     before the (optional) object reference below it.  */
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
  tree field_ref;
  int is_error = 0;
  tree original_self_type = self_type;
  tree field_decl;
  tree modify_expr;

  if (! CLASS_LOADED_P (self_type))
    load_class (self_type, 1);
  field_decl = lookup_field (&self_type, field_name);
  if (field_decl == error_mark_node)
    {
      is_error = 1;
    }
  else if (field_decl == NULL_TREE)
    {
      if (! flag_verify_invocations)
	{
	  /* BC compilation without verification: fabricate the field
	     decl so compilation can continue.  */
	  int flags = ACC_PUBLIC;
	  if (is_static)
	    flags |= ACC_STATIC;
	  self_type = original_self_type;
	  field_decl = add_field (original_self_type, field_name,
				  field_type, flags);
	  DECL_ARTIFICIAL (field_decl) = 1;
	  DECL_IGNORED_P (field_decl) = 1;
#if 0
	  /* FIXME: We should be pessimistic about volatility.  We
	     don't know one way or another, but this is safe.
	     However, doing this has bad effects on code quality.  We
	     need to look at better ways to do this.  */
	  TREE_THIS_VOLATILE (field_decl) = 1;
#endif
	}
      else
	{
	  error ("missing field '%s' in '%s'",
		 IDENTIFIER_POINTER (field_name), self_name);
	  is_error = 1;
	}
    }
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
    {
      error ("mismatching signature for field '%s' in '%s'",
	     IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  /* The object reference must be popped even on error, to keep the
     stack model consistent.  */
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
  if (is_error)
    {
      /* An error was emitted above; push a typed zero for a get and
	 continue compiling.  */
      if (! is_putting)
	push_value (convert (field_type, integer_zero_node));
      flush_quick_stack ();
      return;
    }

  field_ref = build_field_ref (field_ref, self_type, field_name);
  if (is_static
      && ! flag_indirect_dispatch)
    {
      /* Static field access must trigger class initialization of the
	 class that actually declares the field.  */
      tree context = DECL_CONTEXT (field_ref);
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
	field_ref = build_class_init (context, field_ref);
      else
	field_ref = build_class_init (self_type, field_ref);
    }
  if (is_putting)
    {
      flush_quick_stack ();
      if (FIELD_FINAL (field_decl))
	{
	  if (DECL_CONTEXT (field_decl) != current_class)
	    error ("assignment to final field %q+D not in field's class",
		   field_decl);
	  /* We used to check for assignments to final fields not
	     occurring in the class initializer or in a constructor
	     here.  However, this constraint doesn't seem to be
	     enforced by the JVM.  */
	}

      if (TREE_THIS_VOLATILE (field_decl))
	field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
			    field_ref, new_value);

      /* Emit a memory barrier after a volatile store.  */
      if (TREE_THIS_VOLATILE (field_decl))
	java_add_stmt
	  (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));

      java_add_stmt (modify_expr);
    }
  else
    {
      /* Read the field into a fresh temporary so the load is emitted
	 at this point in the statement stream.  */
      tree temp = build_decl (input_location,
			      VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
      java_add_local_var (temp);

      if (TREE_THIS_VOLATILE (field_decl))
	field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr
	= build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
      java_add_stmt (modify_expr);

      /* Emit a memory barrier after a volatile load.  */
      if (TREE_THIS_VOLATILE (field_decl))
	java_add_stmt
	  (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));

      push_value (temp);
    }
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
}
2963
2964 static void
2965 load_type_state (int pc)
2966 {
2967 int i;
2968 tree vec = VEC_index (tree, type_states, pc);
2969 int cur_length = TREE_VEC_LENGTH (vec);
2970 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2971 for (i = 0; i < cur_length; i++)
2972 type_map [i] = TREE_VEC_ELT (vec, i);
2973 }
2974
/* Go over METHOD's bytecode and note instruction starts in
   instruction_bits[].  Also notes the targets of every jump, branch,
   switch and jsr (via NOTE_LABEL/note_label), so that later expansion
   knows where labels must be emitted.  Allocates instruction_bits and
   type_states sized to the method's code length.  */

void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  int saw_index;   /* Set by the CONST_INDEX/VAR_INDEX immediates below.  */
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef PTR
#define BCODE	byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
/* Immediates that name a constant-pool or variable index record the
   fact in saw_index, so PRE_BRANCH/PRE_JSR know the operand was an
   index rather than a PC-relative offset.  */
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  /* Position the reader at the method's code and (re)allocate the
     per-PC bookkeeping arrays, zero-initialized.  */
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);
  type_states = VEC_alloc (tree, gc, length + 1);
  VEC_safe_grow_cleared (tree, gc, type_states, length + 1);

  /* This pass figures out which PC can be the targets of jumps.
     The PRE_* macros below consume each opcode's immediates (so PC
     advances correctly) and call NOTE_LABEL for every branch target.  */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
	{
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
          PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
          break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

/* Opcodes with an operand but no branch target: just consume the
   immediate bytes.  */
#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc) \
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
	(void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
        PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

/* Conditional and unconditional branches: the 16-bit signed offset is
   relative to the instruction start (oldpc).  */
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
/* jsr also notes the return point (PC of the following instruction).  */
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

/* Switch payloads are 4-byte aligned relative to the code start.  */
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (npairs >= 0) \
      while (--npairs >= 0) { \
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (low <= high) \
      while (low++ <= high) { \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
	}
    } /* for */
}
3122
/* Translate the bytecode of METHOD, read from JCF, into statements
   appended to the current function.  Relies on note_instructions having
   filled instruction_bits[] for this method; runs the verifier first
   and gives up (silently) if verification fails.  Also tracks source
   line numbers and records the method's last line.  */

void
expand_byte_code (JCF *jcf, tree method)
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;   /* Start PC of the current dead-code run, or -1.  */
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);
  location_t max_location = input_location;

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries.  */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      /* Each entry is 4 bytes: u2 start_pc followed by u2 line_number.  */
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;
      if (pc >= length)
	warning (0, "invalid PC in line number table");
      else
	{
	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
	}
    }

  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();
  cache_this_class_ref (method);
  cache_cpool_data_ref ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
	{
	  /* Branch target (or method entry): flush the quick stack,
	     emit a label, and reload the verifier's type state.  */
	  tree label = lookup_label (PC);
	  flush_quick_stack ();
	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
	    java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
	  if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	    load_type_state (PC);
	}

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
	{
	  if (dead_code_index == -1)
	    {
	      /* This is the start of a region of unreachable bytecodes.
		 They still need to be processed in order for EH ranges
		 to get handled correctly.  However, we can simply
		 replace these bytecodes with nops.  */
	      dead_code_index = PC;
	    }

	  /* Turn this bytecode into a nop.  */
	  byte_ops[PC] = 0x0;
	}
      else
	{
	  if (dead_code_index != -1)
	    {
	      /* We've just reached the end of a region of dead code.  */
	      if (extra_warnings)
		warning (0, "unreachable bytecode from %d to before %d",
			 dead_code_index, PC);
	      dead_code_index = -1;
	    }
	}

      /* Handle possible line number entry for this PC.

	 This code handles out-of-order and multiple linenumbers per PC,
	 but is optimized for the case of line numbers increasing
	 monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
	{
	  /* Restart the scan from the table head unless the cursor is
	     already sitting on this PC's (unique) entry.  */
	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
	      || GET_u2 (linenumber_pointer) != PC)
	    linenumber_pointer = linenumber_table;
	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
	    {
	      int pc = GET_u2 (linenumber_pointer);
	      linenumber_pointer += 4;
	      if (pc == PC)
		{
		  int line = GET_u2 (linenumber_pointer - 2);
		  input_location = linemap_line_start (line_table, line, 1);
		  if (input_location > max_location)
		    max_location = input_location;
		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
		    break;
		}
	    }
	}
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  uncache_this_class_ref (method);

  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
	warning (0, "unreachable bytecode from %d to the end of the method",
		 dead_code_index);
    }

  DECL_FUNCTION_LAST_LINE (method) = max_location;
}
3244
3245 static void
3246 java_push_constant_from_pool (JCF *jcf, int index)
3247 {
3248 tree c;
3249 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3250 {
3251 tree name;
3252 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3253 index = alloc_name_constant (CONSTANT_String, name);
3254 c = build_ref_from_constant_pool (index);
3255 c = convert (promote_type (string_type_node), c);
3256 }
3257 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3258 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3259 {
3260 tree record = get_class_constant (jcf, index);
3261 c = build_class_ref (record);
3262 }
3263 else
3264 c = get_constant (jcf, index);
3265 push_value (c);
3266 }
3267
3268 int
3269 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3270 long length ATTRIBUTE_UNUSED)
3271 {
3272 const char *opname; /* Temporary ??? */
3273 int oldpc = PC; /* PC at instruction start. */
3274
3275 /* If the instruction is at the beginning of an exception handler,
3276 replace the top of the stack with the thrown object reference. */
3277 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3278 {
3279 /* Note that the verifier will not emit a type map at all for
3280 dead exception handlers. In this case we just ignore the
3281 situation. */
3282 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3283 {
3284 tree type = pop_type (promote_type (throwable_type_node));
3285 push_value (build_exception_object_ref (type));
3286 }
3287 }
3288
3289 switch (byte_ops[PC++])
3290 {
3291 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3292 case OPCODE: \
3293 opname = #OPNAME; \
3294 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3295 break;
3296
3297 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3298 { \
3299 int saw_index = 0; \
3300 int index = OPERAND_VALUE; \
3301 (void) saw_index; /* Avoid set but not used warning. */ \
3302 build_java_ret \
3303 (find_local_variable (index, return_address_type_node, oldpc)); \
3304 }
3305
3306 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3307 { \
3308 /* OPERAND_VALUE may have side-effects on PC */ \
3309 int opvalue = OPERAND_VALUE; \
3310 build_java_jsr (oldpc + opvalue, PC); \
3311 }
3312
3313 /* Push a constant onto the stack. */
3314 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3315 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3316 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3317 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3318
3319 /* internal macro added for use by the WIDE case */
3320 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3321 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3322
3323 /* Push local variable onto the opcode stack. */
3324 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3325 { \
3326 /* have to do this since OPERAND_VALUE may have side-effects */ \
3327 int opvalue = OPERAND_VALUE; \
3328 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3329 }
3330
3331 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3332 expand_java_return (OPERAND_TYPE##_type_node)
3333
3334 #define REM_EXPR TRUNC_MOD_EXPR
3335 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3336 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3337
3338 #define FIELD(IS_STATIC, IS_PUT) \
3339 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3340
3341 #define TEST(OPERAND_TYPE, CONDITION) \
3342 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3343
3344 #define COND(OPERAND_TYPE, CONDITION) \
3345 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3346
3347 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3348 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3349
3350 #define BRANCH_GOTO(OPERAND_VALUE) \
3351 expand_java_goto (oldpc + OPERAND_VALUE)
3352
3353 #define BRANCH_CALL(OPERAND_VALUE) \
3354 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3355
3356 #if 0
3357 #define BRANCH_RETURN(OPERAND_VALUE) \
3358 { \
3359 tree type = OPERAND_TYPE##_type_node; \
3360 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3361 expand_java_ret (value); \
3362 }
3363 #endif
3364
3365 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3366 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3367 fprintf (stderr, "(not implemented)\n")
3368 #define NOT_IMPL1(OPERAND_VALUE) \
3369 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3370 fprintf (stderr, "(not implemented)\n")
3371
3372 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3373
3374 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3375
3376 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3377
3378 #define STACK_SWAP(COUNT) java_stack_swap()
3379
3380 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3381 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3382 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3383
3384 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3385 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3386
3387 #define LOOKUP_SWITCH \
3388 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3389 tree selector = pop_value (INT_type_node); \
3390 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3391 while (--npairs >= 0) \
3392 { \
3393 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3394 expand_java_add_case (switch_expr, match, oldpc + offset); \
3395 } \
3396 }
3397
3398 #define TABLE_SWITCH \
3399 { jint default_offset = IMMEDIATE_s4; \
3400 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3401 tree selector = pop_value (INT_type_node); \
3402 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3403 for (; low <= high; low++) \
3404 { \
3405 jint offset = IMMEDIATE_s4; \
3406 expand_java_add_case (switch_expr, low, oldpc + offset); \
3407 } \
3408 }
3409
3410 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3411 { int opcode = byte_ops[PC-1]; \
3412 int method_ref_index = IMMEDIATE_u2; \
3413 int nargs; \
3414 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3415 else nargs = -1; \
3416 expand_invoke (opcode, method_ref_index, nargs); \
3417 }
3418
3419 /* Handle new, checkcast, instanceof */
3420 #define OBJECT(TYPE, OP) \
3421 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3422
3423 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3424
3425 #define ARRAY_LOAD(OPERAND_TYPE) \
3426 { \
3427 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3428 }
3429
3430 #define ARRAY_STORE(OPERAND_TYPE) \
3431 { \
3432 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3433 }
3434
3435 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3436 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3437 #define ARRAY_NEW_PTR() \
3438 push_value (build_anewarray (get_class_constant (current_jcf, \
3439 IMMEDIATE_u2), \
3440 pop_value (int_type_node)));
3441 #define ARRAY_NEW_NUM() \
3442 { \
3443 int atype = IMMEDIATE_u1; \
3444 push_value (build_newarray (atype, pop_value (int_type_node)));\
3445 }
3446 #define ARRAY_NEW_MULTI() \
3447 { \
3448 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3449 int ndims = IMMEDIATE_u1; \
3450 expand_java_multianewarray( klass, ndims ); \
3451 }
3452
3453 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3454 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3455 pop_value (OPERAND_TYPE##_type_node)));
3456
3457 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3458 { \
3459 push_value (build1 (NOP_EXPR, int_type_node, \
3460 (convert (TO_TYPE##_type_node, \
3461 pop_value (FROM_TYPE##_type_node))))); \
3462 }
3463
3464 #define CONVERT(FROM_TYPE, TO_TYPE) \
3465 { \
3466 push_value (convert (TO_TYPE##_type_node, \
3467 pop_value (FROM_TYPE##_type_node))); \
3468 }
3469
3470 /* internal macro added for use by the WIDE case
3471 Added TREE_TYPE (decl) assignment, apbianco */
3472 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3473 { \
3474 tree decl, value; \
3475 int index = OPVALUE; \
3476 tree type = OPTYPE; \
3477 value = pop_value (type); \
3478 type = TREE_TYPE (value); \
3479 decl = find_local_variable (index, type, oldpc); \
3480 set_local_type (index, type); \
3481 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3482 }
3483
3484 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3485 { \
3486 /* have to do this since OPERAND_VALUE may have side-effects */ \
3487 int opvalue = OPERAND_VALUE; \
3488 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3489 }
3490
3491 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3492 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3493
3494 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3495 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3496
3497 #define MONITOR_OPERATION(call) \
3498 { \
3499 tree o = pop_value (ptr_type_node); \
3500 tree c; \
3501 flush_quick_stack (); \
3502 c = build_java_monitor (call, o); \
3503 TREE_SIDE_EFFECTS (c) = 1; \
3504 java_add_stmt (c); \
3505 }
3506
3507 #define SPECIAL_IINC(IGNORED) \
3508 { \
3509 unsigned int local_var_index = IMMEDIATE_u1; \
3510 int ival = IMMEDIATE_s1; \
3511 expand_iinc(local_var_index, ival, oldpc); \
3512 }
3513
3514 #define SPECIAL_WIDE(IGNORED) \
3515 { \
3516 int modified_opcode = IMMEDIATE_u1; \
3517 unsigned int local_var_index = IMMEDIATE_u2; \
3518 switch (modified_opcode) \
3519 { \
3520 case OPCODE_iinc: \
3521 { \
3522 int ival = IMMEDIATE_s2; \
3523 expand_iinc (local_var_index, ival, oldpc); \
3524 break; \
3525 } \
3526 case OPCODE_iload: \
3527 case OPCODE_lload: \
3528 case OPCODE_fload: \
3529 case OPCODE_dload: \
3530 case OPCODE_aload: \
3531 { \
3532 /* duplicate code from LOAD macro */ \
3533 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3534 break; \
3535 } \
3536 case OPCODE_istore: \
3537 case OPCODE_lstore: \
3538 case OPCODE_fstore: \
3539 case OPCODE_dstore: \
3540 case OPCODE_astore: \
3541 { \
3542 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3543 break; \
3544 } \
3545 default: \
3546 error ("unrecogized wide sub-instruction"); \
3547 } \
3548 }
3549
3550 #define SPECIAL_THROW(IGNORED) \
3551 build_java_athrow (pop_value (throwable_type_node))
3552
3553 #define SPECIAL_BREAK NOT_IMPL1
3554 #define IMPL NOT_IMPL
3555
3556 #include "javaop.def"
3557 #undef JAVAOP
3558 default:
3559 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3560 }
3561 return PC;
3562 }
3563
3564 /* Return the opcode at PC in the code section pointed to by
3565 CODE_OFFSET. */
3566
3567 static unsigned char
3568 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3569 {
3570 unsigned char opcode;
3571 long absolute_offset = (long)JCF_TELL (jcf);
3572
3573 JCF_SEEK (jcf, code_offset);
3574 opcode = jcf->read_ptr [pc];
3575 JCF_SEEK (jcf, absolute_offset);
3576 return opcode;
3577 }
3578
3579 /* Some bytecode compilers are emitting accurate LocalVariableTable
3580 attributes. Here's an example:
3581
3582 PC <t>store_<n>
3583 PC+1 ...
3584
3585 Attribute "LocalVariableTable"
3586 slot #<n>: ... (PC: PC+1 length: L)
3587
3588 This is accurate because the local in slot <n> really exists after
3589 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3590
3591 This procedure recognizes this situation and extends the live range
3592 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3593 length of the store instruction.)
3594
3595 This function is used by `give_name_to_locals' so that a local's
3596 DECL features a DECL_LOCAL_START_PC such that the first related
3597 store operation will use DECL as a destination, not an unrelated
3598 temporary created for the occasion.
3599
3600 This function uses a global (instruction_bits) `note_instructions' should
3601 have allocated and filled properly. */
3602
3603 int
3604 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3605 int start_pc, int slot)
3606 {
3607 int first, index, opcode;
3608 int pc, insn_pc;
3609 int wide_found = 0;
3610
3611 if (!start_pc)
3612 return start_pc;
3613
3614 first = index = -1;
3615
3616 /* Find last previous instruction and remember it */
3617 for (pc = start_pc-1; pc; pc--)
3618 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3619 break;
3620 insn_pc = pc;
3621
3622 /* Retrieve the instruction, handle `wide'. */
3623 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3624 if (opcode == OPCODE_wide)
3625 {
3626 wide_found = 1;
3627 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3628 }
3629
3630 switch (opcode)
3631 {
3632 case OPCODE_astore_0:
3633 case OPCODE_astore_1:
3634 case OPCODE_astore_2:
3635 case OPCODE_astore_3:
3636 first = OPCODE_astore_0;
3637 break;
3638
3639 case OPCODE_istore_0:
3640 case OPCODE_istore_1:
3641 case OPCODE_istore_2:
3642 case OPCODE_istore_3:
3643 first = OPCODE_istore_0;
3644 break;
3645
3646 case OPCODE_lstore_0:
3647 case OPCODE_lstore_1:
3648 case OPCODE_lstore_2:
3649 case OPCODE_lstore_3:
3650 first = OPCODE_lstore_0;
3651 break;
3652
3653 case OPCODE_fstore_0:
3654 case OPCODE_fstore_1:
3655 case OPCODE_fstore_2:
3656 case OPCODE_fstore_3:
3657 first = OPCODE_fstore_0;
3658 break;
3659
3660 case OPCODE_dstore_0:
3661 case OPCODE_dstore_1:
3662 case OPCODE_dstore_2:
3663 case OPCODE_dstore_3:
3664 first = OPCODE_dstore_0;
3665 break;
3666
3667 case OPCODE_astore:
3668 case OPCODE_istore:
3669 case OPCODE_lstore:
3670 case OPCODE_fstore:
3671 case OPCODE_dstore:
3672 index = peek_opcode_at_pc (jcf, code_offset, pc);
3673 if (wide_found)
3674 {
3675 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3676 index = (other << 8) + index;
3677 }
3678 break;
3679 }
3680
3681 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3682 means we have a <t>store. */
3683 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3684 start_pc = insn_pc;
3685
3686 return start_pc;
3687 }
3688
/* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
   order, as specified by Java Language Specification.

   The problem is that while expand_expr will evaluate its sub-operands in
   left-to-right order, for variables it will just return an rtx (i.e.
   an lvalue) for the variable (rather than an rvalue).  So it is possible
   that a later sub-operand will change the register, and when the
   actual operation is done, it will use the new value, when it should
   have used the original value.

   We fix this by using save_expr.  This forces the sub-operand to be
   copied into a fresh virtual register,

   For method invocation, we modify the arguments so that a
   left-to-right order evaluation is performed.  Saved expressions
   will, in CALL_EXPR order, be reused when the call will be expanded.

   We also promote outgoing args if needed.  */

tree
force_evaluation_order (tree node)
{
  /* With -fsyntax-only no code is generated, so ordering is moot.  */
  if (flag_syntax_only)
    return node;
  /* Only CALL_EXPRs need rewriting, possibly wrapped in the
     COMPOUND_EXPR form produced for constructor invocations.  */
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
    {
      tree call, cmp;
      int i, nargs;

      /* Account for wrapped around ctors.  */
      if (TREE_CODE (node) == COMPOUND_EXPR)
	call = TREE_OPERAND (node, 0);
      else
	call = node;

      nargs = call_expr_nargs (call);

      /* This reverses the evaluation order.  This is a desired effect.  */
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
	{
	  tree arg = CALL_EXPR_ARG (call, i);
	  /* Promote types smaller than integer.  This is required by
	     some ABIs.  */
	  tree type = TREE_TYPE (arg);
	  tree saved;
	  if (targetm.calls.promote_prototypes (type)
	      && INTEGRAL_TYPE_P (type)
	      && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
				      TYPE_SIZE (integer_type_node)))
	    arg = fold_convert (integer_type_node, arg);

	  /* Chain each saved argument onto CMP so the side effects run
	     in argument order; the SAVE_EXPRs in the call then reuse
	     the already-computed values.  */
	  saved = save_expr (force_evaluation_order (arg));
	  cmp = (cmp == NULL_TREE ? saved :
		 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));

	  CALL_EXPR_ARG (call, i) = saved;
	}

      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
	TREE_SIDE_EFFECTS (cmp) = 1;

      if (cmp)
	{
	  /* Evaluate the argument chain first, then the call itself.  */
	  cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
	  if (TREE_TYPE (cmp) != void_type_node)
	    cmp = save_expr (cmp);
	  TREE_SIDE_EFFECTS (cmp) = 1;
	  node = cmp;
	}
    }
  return node;
}
3764
3765 /* Build a node to represent empty statements and blocks. */
3766
3767 tree
3768 build_java_empty_stmt (void)
3769 {
3770 tree t = build_empty_stmt (input_location);
3771 return t;
3772 }
3773
3774 /* Promote all args of integral type before generating any code. */
3775
3776 static void
3777 promote_arguments (void)
3778 {
3779 int i;
3780 tree arg;
3781 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3782 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3783 {
3784 tree arg_type = TREE_TYPE (arg);
3785 if (INTEGRAL_TYPE_P (arg_type)
3786 && TYPE_PRECISION (arg_type) < 32)
3787 {
3788 tree copy = find_local_variable (i, integer_type_node, -1);
3789 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3790 copy,
3791 fold_convert (integer_type_node, arg)));
3792 }
3793 if (TYPE_IS_WIDE (arg_type))
3794 i++;
3795 }
3796 }
3797
3798 /* Create a local variable that points to the constant pool. */
3799
3800 static void
3801 cache_cpool_data_ref (void)
3802 {
3803 if (optimize)
3804 {
3805 tree cpool;
3806 tree d = build_constant_data_ref (flag_indirect_classes);
3807 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3808 build_pointer_type (TREE_TYPE (d)));
3809 java_add_local_var (cpool_ptr);
3810 TREE_CONSTANT (cpool_ptr) = 1;
3811
3812 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3813 cpool_ptr, build_address_of (d)));
3814 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3815 TREE_THIS_NOTRAP (cpool) = 1;
3816 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3817 }
3818 }
3819
3820 #include "gt-java-expr.h"