Convert ChangeLog files to UTF-8.
[gcc.git] / gcc / java / expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
20
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "real.h"
33 #include "rtl.h"
34 #include "flags.h"
35 #include "expr.h"
36 #include "java-tree.h"
37 #include "javaop.h"
38 #include "java-opcodes.h"
39 #include "jcf.h"
40 #include "java-except.h"
41 #include "parse.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "ggc.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "target.h"
48
49 static void flush_quick_stack (void);
50 static void push_value (tree);
51 static tree pop_value (tree);
52 static void java_stack_swap (void);
53 static void java_stack_dup (int, int);
54 static void build_java_athrow (tree);
55 static void build_java_jsr (int, int);
56 static void build_java_ret (tree);
57 static void expand_java_multianewarray (tree, int);
58 static void expand_java_arraystore (tree);
59 static void expand_java_arrayload (tree);
60 static void expand_java_array_length (void);
61 static tree build_java_monitor (tree, tree);
62 static void expand_java_pushc (int, tree);
63 static void expand_java_return (tree);
64 static void expand_load_internal (int, tree, int);
65 static void expand_java_NEW (tree);
66 static void expand_java_INSTANCEOF (tree);
67 static void expand_java_CHECKCAST (tree);
68 static void expand_iinc (unsigned int, int, int);
69 static void expand_java_binop (tree, enum tree_code);
70 static void note_label (int, int);
71 static void expand_compare (enum tree_code, tree, tree, int);
72 static void expand_test (enum tree_code, tree, int);
73 static void expand_cond (enum tree_code, tree, int);
74 static void expand_java_goto (int);
75 static tree expand_java_switch (tree, int);
76 static void expand_java_add_case (tree, int, int);
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
85 static void promote_arguments (void);
86 static void cache_cpool_data_ref (void);
87
88 static GTY(()) tree operand_type[59];
89
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
93
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p = 0;
97
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
107
108 If a variable is on the quick stack, it means the value of variable
109 when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to is home virtual register) from low indexes. This allows a quick_stack
113 element at index i (counting from the bottom of stack the) to references
114 slot virtuals for register that are >= i, but not those that are deeper.
115 This convention makes most operations easier. For example iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
123 */
124
125 static GTY(()) tree quick_stack;
126
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable)) tree tree_list_free_list;
129
130 /* The physical memory page size used in this computer. See
131 build_field_ref(). */
132 static GTY(()) tree page_size;
133
134 /* The stack pointer of the Java virtual machine.
135 This does include the size of the quick_stack. */
136
137 int stack_pointer;
138
139 const unsigned char *linenumber_table;
140 int linenumber_count;
141
142 /* Largest pc so far in this method that has been passed to lookup_label. */
143 int highest_label_pc_this_method = -1;
144
145 /* Base value for this method to add to pc to get generated label. */
146 int start_label_pc_this_method = 0;
147
148 void
149 init_expr_processing (void)
150 {
151 operand_type[21] = operand_type[54] = int_type_node;
152 operand_type[22] = operand_type[55] = long_type_node;
153 operand_type[23] = operand_type[56] = float_type_node;
154 operand_type[24] = operand_type[57] = double_type_node;
155 operand_type[25] = operand_type[58] = ptr_type_node;
156 }
157
/* Convert EXPR to a tree usable as a boolean condition.  Comparison
   and truth operators are already truth values and pass through
   unchanged; integer and real constants fold directly to
   boolean_true_node / boolean_false_node; anything else becomes an
   explicit comparison against boolean_false_node.  */

tree
java_truthvalue_conversion (tree expr)
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:   case NE_EXPR:   case UNEQ_EXPR: case LTGT_EXPR:
    case LE_EXPR:   case GE_EXPR:   case LT_EXPR:   case GT_EXPR:
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
    case ERROR_MARK:
      /* Already a truth value (or an error); return untouched.  */
      return expr;

    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
                          java_truthvalue_conversion (TREE_OPERAND (expr, 2)));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
          >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
        return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      return fold_build2 (NE_EXPR, boolean_type_node,
                          expr, boolean_false_node);
    }
}
212
/* Save any stack slots that happen to be in the quick_stack into their
   home virtual register slots.

   The copy order is from low stack index to high, to support the invariant
   that the expression for a slot may contain decls for stack slots with
   higher (or the same) index, but not lower.  */

static void
flush_quick_stack (void)
{
  int stack_index = stack_pointer;
  tree prev, cur, next;

  /* First reverse the quick_stack, and count the number of slots it has.
     Reversal puts the deepest entry first so the copy loop below runs
     from low stack index to high.  While walking, subtract each entry's
     width (2 words for long/double, per TYPE_IS_WIDE) from stack_pointer
     to recover the index of the bottom-most quick_stack element.  */
  for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
    {
      next = TREE_CHAIN (cur);
      TREE_CHAIN (cur) = prev;
      prev = cur;
      stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
    }
  quick_stack = prev;

  while (quick_stack != NULL_TREE)
    {
      tree decl;
      tree node = quick_stack, type;
      quick_stack = TREE_CHAIN (node);
      /* Recycle the TREE_LIST cell onto the free list; the value tree
         it carried is extracted first and is unaffected.  */
      TREE_CHAIN (node) = tree_list_free_list;
      tree_list_free_list = node;
      node = TREE_VALUE (node);
      type = TREE_TYPE (node);

      /* Emit a store into the home slot, unless the value is already
         just a reference to that very slot.  */
      decl = find_stack_slot (stack_index, type);
      if (decl != node)
        java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
      stack_index += 1 + TYPE_IS_WIDE (type);
    }
}
252
253 /* Push TYPE on the type stack.
254 Return true on success, 0 on overflow. */
255
256 int
257 push_type_0 (tree type)
258 {
259 int n_words;
260 type = promote_type (type);
261 n_words = 1 + TYPE_IS_WIDE (type);
262 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
263 return 0;
264 /* Allocate decl for this variable now, so we get a temporary that
265 survives the whole method. */
266 find_stack_slot (stack_pointer, type);
267 stack_type_map[stack_pointer++] = type;
268 n_words--;
269 while (--n_words >= 0)
270 stack_type_map[stack_pointer++] = TYPE_SECOND;
271 return 1;
272 }
273
274 void
275 push_type (tree type)
276 {
277 int r = push_type_0 (type);
278 gcc_assert (r);
279 }
280
/* Push VALUE onto the quick stack, first promoting sub-int integral
   types to int (the JVM operand stack has no slot narrower than 32
   bits).  The matching type is pushed on the type stack via push_type.
   TREE_LIST cells are recycled from tree_list_free_list when one is
   available.  */

static void
push_value (tree value)
{
  tree type = TREE_TYPE (value);
  if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
    {
      type = promote_type (type);
      value = convert (type, value);
    }
  push_type (type);
  if (tree_list_free_list == NULL_TREE)
    quick_stack = tree_cons (NULL_TREE, value, quick_stack);
  else
    {
      /* Reuse a cell from the free list instead of allocating.  */
      tree node = tree_list_free_list;
      tree_list_free_list = TREE_CHAIN (tree_list_free_list);
      TREE_VALUE (node) = value;
      TREE_CHAIN (node) = quick_stack;
      quick_stack = node;
    }
  /* If the value has a side effect, then we need to evaluate it
     whether or not the result is used.  If the value ends up on the
     quick stack and is then popped, this won't happen -- so we flush
     the quick stack.  It is safest to simply always flush, though,
     since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
     the latter we may need to strip conversions.  */
  flush_quick_stack ();
}
309
/* Pop a type from the type stack.
   TYPE is the expected type.   Return the actual type, which must be
   convertible to TYPE.
   On an error, *MESSAGEP is set to a freshly malloc'd error message.  */

tree
pop_type_0 (tree type, char **messagep)
{
  int n_words;
  tree t;
  *messagep = NULL;
  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  /* For a two-word type (long/double), the upper word must hold the
     TYPE_SECOND marker (void_type_node).  */
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
        {
          *messagep = xstrdup ("Invalid multi-word value on type stack");
          return type;
        }
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;
  if (TREE_CODE (t) == TREE_LIST)
    {
      /* A TREE_LIST entry holds a set of candidate reference types;
         each TREE_PURPOSE must be widenable to TYPE or we fail.
         NOTE(review): when the loop runs to completion T is NULL_TREE
         and that is what gets returned -- presumably callers treat a
         non-error return as success; confirm before changing.  */
      do
        {
          tree tt = TREE_PURPOSE (t);
          if (! can_widen_reference_to (tt, type))
            {
              t = tt;
              goto fail;
            }
          t = TREE_CHAIN (t);
        }
      while (t);
      return t;
    }
  /* Narrow (<= 32 bit) integral types are interchangeable on the
     operand stack.  */
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
    return t;
  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      /* If the expected type we've been passed is object or ptr
         (i.e. void*), the caller needs to know the real type.  */
      if (type == ptr_type_node || type == object_ptr_type_node)
        return t;

      /* Since the verifier has already run, we know that any
         types we see will be compatible.  In BC mode, this fact
         may be checked at runtime, but if that is so then we can
         assume its truth here as well.  So, we always succeed
         here, with the expected type.  */
      return type;
    }

  if (! flag_verify_invocations && flag_indirect_dispatch
      && t == object_ptr_type_node)
    {
      if (type != ptr_type_node)
        warning (0, "need to insert runtime check for %s",
                 xstrdup (lang_printable_name (type, 0)));
      return type;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
 fail:
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    /* If the stack contains a multi-word type, keep popping the stack until
       the real type is found.  */
    while (t == void_type_node)
      t = stack_type_map[--stack_pointer];
    *messagep = concat ("expected type '", temp,
                        "' but stack contains '", lang_printable_name (t, 0),
                        "'", NULL);
    free (temp);
  }
  return type;
}
398
399 /* Pop a type from the type stack.
400 TYPE is the expected type. Return the actual type, which must be
401 convertible to TYPE, otherwise call error. */
402
403 tree
404 pop_type (tree type)
405 {
406 char *message = NULL;
407 type = pop_type_0 (type, &message);
408 if (message != NULL)
409 {
410 error ("%s", message);
411 free (message);
412 }
413 return type;
414 }
415
416 \f
417 /* Return true if two type assertions are equal. */
418
419 static int
420 type_assertion_eq (const void * k1_p, const void * k2_p)
421 {
422 const type_assertion k1 = *(const type_assertion *)k1_p;
423 const type_assertion k2 = *(const type_assertion *)k2_p;
424 return (k1.assertion_code == k2.assertion_code
425 && k1.op1 == k2.op1
426 && k1.op2 == k2.op2);
427 }
428
/* Hash a type assertion.  Only the operands that are meaningful for
   the given assertion code are mixed into the hash, so the case
   fall-throughs below are deliberate: TYPES_COMPATIBLE hashes op2 then
   op1, IS_INSTANTIABLE hashes op1 only, END_OF_TABLE hashes neither.  */

static hashval_t
type_assertion_hash (const void *p)
{
  const type_assertion *k_p = (const type_assertion *) p;
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
                                   k_p->assertion_code, 0);

  switch (k_p->assertion_code)
    {
    case JV_ASSERT_TYPES_COMPATIBLE:
      hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
                             hash);
      /* Fall through.  */

    case JV_ASSERT_IS_INSTANTIABLE:
      hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
                             hash);
      /* Fall through.  */

    case JV_ASSERT_END_OF_TABLE:
      break;

    default:
      gcc_unreachable ();
    }

  return hash;
}
459
460 /* Add an entry to the type assertion table for the given class.
461 KLASS is the class for which this assertion will be evaluated by the
462 runtime during loading/initialization.
463 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
464 OP1 and OP2 are the operands. The tree type of these arguments may be
465 specific to each assertion_code. */
466
467 void
468 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
469 {
470 htab_t assertions_htab;
471 type_assertion as;
472 void **as_pp;
473
474 assertions_htab = TYPE_ASSERTIONS (klass);
475 if (assertions_htab == NULL)
476 {
477 assertions_htab = htab_create_ggc (7, type_assertion_hash,
478 type_assertion_eq, NULL);
479 TYPE_ASSERTIONS (current_class) = assertions_htab;
480 }
481
482 as.assertion_code = assertion_code;
483 as.op1 = op1;
484 as.op2 = op2;
485
486 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
487
488 /* Don't add the same assertion twice. */
489 if (*as_pp)
490 return;
491
492 *as_pp = ggc_alloc (sizeof (type_assertion));
493 **(type_assertion **)as_pp = as;
494 }
495
496 \f
/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
   Handles array types and interfaces.  */

int
can_widen_reference_to (tree source_type, tree target_type)
{
  /* The untyped pointer widens to anything; anything widens to
     java.lang.Object.  */
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;

  /* FIXME: This is very pessimistic, in that it checks everything,
     even if we already know that the types are compatible.  If we're
     to support full Java class loader semantics, we need this.
     However, we could do something more optimal.  */
  if (! flag_verify_invocations)
    {
      /* Record a runtime assertion and punt the real check to the
         runtime loader.  */
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
                          source_type, target_type);

      if (!quiet_flag)
       warning (0, "assert: %s is assign compatible with %s",
                xstrdup (lang_printable_name (target_type, 0)),
                xstrdup (lang_printable_name (source_type, 0)));
      /* Punt everything to runtime.  */
      return 1;
    }

  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
    {
      return 1;
    }
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
        {
          HOST_WIDE_INT source_length, target_length;
          if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
            {
              /* An array implements Cloneable and Serializable.  */
              tree name = DECL_NAME (TYPE_NAME (target_type));
              return (name == java_lang_cloneable_identifier_node
                      || name == java_io_serializable_identifier_node);
            }
          /* Arrays of known length are only compatible when the
             lengths agree.  */
          target_length = java_array_type_length (target_type);
          if (target_length >= 0)
            {
              source_length = java_array_type_length (source_type);
              if (source_length != target_length)
                return 0;
            }
          /* Recurse on the element types.  */
          source_type = TYPE_ARRAY_ELEMENT (source_type);
          target_type = TYPE_ARRAY_ELEMENT (target_type);
          if (source_type == target_type)
            return 1;
          if (TREE_CODE (source_type) != POINTER_TYPE
              || TREE_CODE (target_type) != POINTER_TYPE)
            return 0;
          return can_widen_reference_to (source_type, target_type);
        }
      else
        {
          int source_depth = class_depth (source_type);
          int target_depth = class_depth (target_type);

          if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
            {
              if (! quiet_flag)
                warning (0, "assert: %s is assign compatible with %s",
                         xstrdup (lang_printable_name (target_type, 0)),
                         xstrdup (lang_printable_name (source_type, 0)));
              return 1;
            }

          /* class_depth can return a negative depth if an error occurred */
          if (source_depth < 0 || target_depth < 0)
            return 0;

          if (CLASS_INTERFACE (TYPE_NAME (target_type)))
            {
              /* target_type is OK if source_type or source_type ancestors
                 implement target_type.  We handle multiple sub-interfaces  */
              tree binfo, base_binfo;
              int i;

              for (binfo = TYPE_BINFO (source_type), i = 0;
                   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
                if (can_widen_reference_to
                    (BINFO_TYPE (base_binfo), target_type))
                  return 1;

              if (!i)
                return 0;
            }

          /* Walk up SOURCE_TYPE's superclass chain until it is at the
             same depth as TARGET_TYPE, then compare directly.  */
          for ( ; source_depth > target_depth;  source_depth--)
            {
              source_type
                = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
            }
          return source_type == target_type;
        }
    }
}
608
609 static tree
610 pop_value (tree type)
611 {
612 type = pop_type (type);
613 if (quick_stack)
614 {
615 tree node = quick_stack;
616 quick_stack = TREE_CHAIN (quick_stack);
617 TREE_CHAIN (node) = tree_list_free_list;
618 tree_list_free_list = node;
619 node = TREE_VALUE (node);
620 return node;
621 }
622 else
623 return find_stack_slot (stack_pointer, promote_type (type));
624 }
625
626
627 /* Pop and discard the top COUNT stack slots. */
628
629 static void
630 java_stack_pop (int count)
631 {
632 while (count > 0)
633 {
634 tree type, val;
635
636 gcc_assert (stack_pointer != 0);
637
638 type = stack_type_map[stack_pointer - 1];
639 if (type == TYPE_SECOND)
640 {
641 count--;
642 gcc_assert (stack_pointer != 1 && count > 0);
643
644 type = stack_type_map[stack_pointer - 2];
645 }
646 val = pop_value (type);
647 count--;
648 }
649 }
650
/* Implement the 'swap' operator (to swap two top stack slots).  */

static void
java_stack_swap (void)
{
  tree type1, type2;
  tree temp;
  tree decl1, decl2;

  /* Swap is only defined on two single-word values: reject underflow,
     TYPE_SECOND markers, and wide (two-word) types.  */
  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  /* Classic three-assignment swap through a fresh temporary; each
     destination slot is looked up with the type it will hold after
     the swap.  */
  temp = build_decl (VAR_DECL, NULL_TREE, type1);
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
  java_add_stmt (build2 (MODIFY_EXPR, type2,
                         find_stack_slot (stack_pointer - 1, type2),
                         decl2));
  java_add_stmt (build2 (MODIFY_EXPR, type1,
                         find_stack_slot (stack_pointer - 2, type1),
                         temp));
  /* Keep the type map in sync with the swapped values.  */
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}
683
684 static void
685 java_stack_dup (int size, int offset)
686 {
687 int low_index = stack_pointer - size - offset;
688 int dst_index;
689 if (low_index < 0)
690 error ("stack underflow - dup* operation");
691
692 flush_quick_stack ();
693
694 stack_pointer += size;
695 dst_index = stack_pointer;
696
697 for (dst_index = stack_pointer; --dst_index >= low_index; )
698 {
699 tree type;
700 int src_index = dst_index - size;
701 if (src_index < low_index)
702 src_index = dst_index + size + offset;
703 type = stack_type_map [src_index];
704 if (type == TYPE_SECOND)
705 {
706 /* Dup operation splits 64-bit number. */
707 gcc_assert (src_index > low_index);
708
709 stack_type_map[dst_index] = type;
710 src_index--; dst_index--;
711 type = stack_type_map[src_index];
712 gcc_assert (TYPE_IS_WIDE (type));
713 }
714 else
715 gcc_assert (! TYPE_IS_WIDE (type));
716
717 if (src_index != dst_index)
718 {
719 tree src_decl = find_stack_slot (src_index, type);
720 tree dst_decl = find_stack_slot (dst_index, type);
721
722 java_add_stmt
723 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
724 stack_type_map[dst_index] = type;
725 }
726 }
727 }
728
729 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
730 value stack. */
731
732 static void
733 build_java_athrow (tree node)
734 {
735 tree call;
736
737 call = build_call_nary (void_type_node,
738 build_address_of (throw_node),
739 1, node);
740 TREE_SIDE_EFFECTS (call) = 1;
741 java_add_stmt (call);
742 java_stack_pop (stack_pointer);
743 }
744
745 /* Implementation for jsr/ret */
746
747 static void
748 build_java_jsr (int target_pc, int return_pc)
749 {
750 tree where = lookup_label (target_pc);
751 tree ret = lookup_label (return_pc);
752 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
753 push_value (ret_label);
754 flush_quick_stack ();
755 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
756
757 /* Do not need to emit the label here. We noted the existence of the
758 label as a jump target in note_instructions; we'll emit the label
759 for real at the beginning of the expand_byte_code loop. */
760 }
761
762 static void
763 build_java_ret (tree location)
764 {
765 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
766 }
767
768 /* Implementation of operations on array: new, load, store, length */
769
770 tree
771 decode_newarray_type (int atype)
772 {
773 switch (atype)
774 {
775 case 4: return boolean_type_node;
776 case 5: return char_type_node;
777 case 6: return float_type_node;
778 case 7: return double_type_node;
779 case 8: return byte_type_node;
780 case 9: return short_type_node;
781 case 10: return int_type_node;
782 case 11: return long_type_node;
783 default: return NULL_TREE;
784 }
785 }
786
787 /* Map primitive type to the code used by OPCODE_newarray. */
788
789 int
790 encode_newarray_type (tree type)
791 {
792 if (type == boolean_type_node)
793 return 4;
794 else if (type == char_type_node)
795 return 5;
796 else if (type == float_type_node)
797 return 6;
798 else if (type == double_type_node)
799 return 7;
800 else if (type == byte_type_node)
801 return 8;
802 else if (type == short_type_node)
803 return 9;
804 else if (type == int_type_node)
805 return 10;
806 else if (type == long_type_node)
807 return 11;
808 else
809 gcc_unreachable ();
810 }
811
812 /* Build a call to _Jv_ThrowBadArrayIndex(), the
813 ArrayIndexOfBoundsException exception handler. */
814
815 static tree
816 build_java_throw_out_of_bounds_exception (tree index)
817 {
818 tree node;
819
820 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
821 has void return type. We cannot just set the type of the CALL_EXPR below
822 to int_type_node because we would lose it during gimplification. */
823 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
824 node = build_call_nary (void_type_node,
825 build_address_of (soft_badarrayindex_node),
826 1, index);
827 TREE_SIDE_EFFECTS (node) = 1;
828
829 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
830 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
831
832 return (node);
833 }
834
/* Return the length of an array.  Doesn't perform any checking on the nature
   or value of the array NODE.  May be used to implement some bytecodes.  */

tree
build_java_array_length_access (tree node)
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  if (!is_array_type_p (type))
    {
      /* With the new verifier, we will see an ordinary pointer type
         here.  In this case, we just use an arbitrary array type.  */
      array_type = build_java_array_type (object_ptr_type_node, -1);
      type = promote_type (array_type);
    }

  /* If the array length is statically known, fold it to a constant
     instead of emitting a field load.  */
  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_cst (NULL_TREE, length);

  /* Otherwise read the 'length' field of the array object, with an
     optional null-reference check per flag_check_references.  */
  node = build3 (COMPONENT_REF, int_type_node,
                 build_java_indirect_ref (array_type, node,
                                          flag_check_references),
                 lookup_field (&array_type, get_identifier ("length")),
                 NULL_TREE);
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}
865
866 /* Optionally checks a reference against the NULL pointer. ARG1: the
867 expr, ARG2: we should check the reference. Don't generate extra
868 checks if we're not generating code. */
869
870 tree
871 java_check_reference (tree expr, int check)
872 {
873 if (!flag_syntax_only && check)
874 {
875 expr = save_expr (expr);
876 expr = build3 (COND_EXPR, TREE_TYPE (expr),
877 build2 (EQ_EXPR, boolean_type_node,
878 expr, null_pointer_node),
879 build_call_nary (void_type_node,
880 build_address_of (soft_nullpointer_node),
881 0),
882 expr);
883 }
884
885 return expr;
886 }
887
888 /* Reference an object: just like an INDIRECT_REF, but with checking. */
889
890 tree
891 build_java_indirect_ref (tree type, tree expr, int check)
892 {
893 tree t;
894 t = java_check_reference (expr, check);
895 t = convert (build_pointer_type (type), t);
896 return build1 (INDIRECT_REF, type, t);
897 }
898
/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
   Optionally performs bounds checking and/or test to NULL.
   At this point, ARRAY should have been verified as an array.  */

tree
build_java_arrayaccess (tree array, tree type, tree index)
{
  tree node, throw_expr = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));

  if (!is_array_type_p (TREE_TYPE (array)))
    {
      /* With the new verifier, we will see an ordinary pointer type
         here.  In this case, we just use the correct array type.  */
      array_type = build_java_array_type (type, -1);
    }

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ... */
      tree test;
      tree len = convert (unsigned_int_type_node,
                          build_java_array_length_access (array));
      test = fold_build2 (GE_EXPR, boolean_type_node,
                          convert (unsigned_int_type_node, index),
                          len);
      /* If folding proved the index in range (constant index and
         length), the test collapses to zero and no check is built.  */
      if (! integer_zerop (test))
        {
          throw_expr
            = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
                      build_java_throw_out_of_bounds_exception (index));
          /* allows expansion within COMPOUND */
          TREE_SIDE_EFFECTS( throw_expr ) = 1;
        }
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first. */
  if (throw_expr != NULL_TREE)
    index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);

  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
                build_java_indirect_ref (array_type, array,
                                         flag_check_references),
                data_field, NULL_TREE);

  /* Take the address of the data field and convert it to a pointer to
     the element type.  */
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));

  /* Multiply the index by the size of an element to obtain a byte
     offset.  Convert the result to a pointer to the element type.  */
  index = build2 (MULT_EXPR, sizetype,
                  fold_convert (sizetype, index),
                  size_exp);

  /* Sum the byte offset and the address of the data field.  */
  node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);

  /* Finally, return

    *((&array->data) + index*size_exp)

  */
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
}
975
/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   determine that no check is required.  */

tree
build_java_arraystore_check (tree array, tree object)
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! flag_verify_invocations)
    {
      /* With the new verifier, we don't track precise types.  FIXME:
         performance regression here.  */
      element_type = TYPE_NAME (object_type_node);
    }
  else
    {
      gcc_assert (is_array_type_p (array_type_p));

      /* Get the TYPE_DECL for ARRAY's element type.  */
      element_type
        = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
    }

  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
              && TREE_CODE (object_type) == TYPE_DECL);

  /* Store checks disabled: emit a no-op wrapper instead.  */
  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final.  Also check that
     element_type matches object_type, since in the bytecode
     compilation case element_type may be the actual element type of
     the array rather than its declared type.  However, if we're doing
     indirect dispatch, we can't do the `final' optimization.  */
  if (element_type == object_type
      && ! flag_indirect_dispatch
      && CLASS_FINAL (element_type))
    return build1 (NOP_EXPR, array_type_p, array);

  /* OBJECT might be wrapped by a SAVE_EXPR.  */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;

  /* Avoid the check if OBJECT was just loaded from the same array.  */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      /* Peel the ARRAY_REF built by build_java_arrayaccess back to the
         underlying array decl so it can be compared with ARRAY.  */
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF.  */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF.  */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR.  */
      if (TREE_CODE (source) == SAVE_EXPR)
        source = TREE_OPERAND (source, 0);

      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
        target = TREE_OPERAND (target, 0);

      if (source == target)
        return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build_call_nary (void_type_node,
                           build_address_of (soft_checkarraystore_node),
                           2, array, object);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}
1050
/* Makes sure that INDEXED_TYPE is appropriate.  If not, make it from
   ARRAY_NODE.  This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */

static tree
build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
			       tree indexed_type)
{
  /* We used to check to see if ARRAY_NODE really had array type.
     However, with the new verifier, this is not necessary, as we know
     that the object will be an array of the appropriate type.  So this
     is now just an identity function kept for the callers' sake.  */

  return indexed_type;
}
1067
1068 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1069 called with an integer code (the type of array to create), and the length
1070 of the array to create. */
1071
1072 tree
1073 build_newarray (int atype_value, tree length)
1074 {
1075 tree type_arg;
1076
1077 tree prim_type = decode_newarray_type (atype_value);
1078 tree type
1079 = build_java_array_type (prim_type,
1080 host_integerp (length, 0) == INTEGER_CST
1081 ? tree_low_cst (length, 0) : -1);
1082
1083 /* Pass a reference to the primitive type class and save the runtime
1084 some work. */
1085 type_arg = build_class_ref (prim_type);
1086
1087 return build_call_nary (promote_type (type),
1088 build_address_of (soft_newarray_node),
1089 2, type_arg, length);
1090 }
1091
1092 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1093 of the dimension. */
1094
1095 tree
1096 build_anewarray (tree class_type, tree length)
1097 {
1098 tree type
1099 = build_java_array_type (class_type,
1100 host_integerp (length, 0)
1101 ? tree_low_cst (length, 0) : -1);
1102
1103 return build_call_nary (promote_type (type),
1104 build_address_of (soft_anewarray_node),
1105 3,
1106 length,
1107 build_class_ref (class_type),
1108 null_pointer_node);
1109 }
1110
1111 /* Return a node the evaluates 'new TYPE[LENGTH]'. */
1112
1113 tree
1114 build_new_array (tree type, tree length)
1115 {
1116 if (JPRIMITIVE_TYPE_P (type))
1117 return build_newarray (encode_newarray_type (type), length);
1118 else
1119 return build_anewarray (TREE_TYPE (type), length);
1120 }
1121
1122 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1123 class pointer, a number of dimensions and the matching number of
1124 dimensions. The argument list is NULL terminated. */
1125
1126 static void
1127 expand_java_multianewarray (tree class_type, int ndim)
1128 {
1129 int i;
1130 tree args = build_tree_list( NULL_TREE, null_pointer_node );
1131
1132 for( i = 0; i < ndim; i++ )
1133 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1134
1135 args = tree_cons (NULL_TREE,
1136 build_class_ref (class_type),
1137 tree_cons (NULL_TREE,
1138 build_int_cst (NULL_TREE, ndim),
1139 args));
1140
1141 push_value (build_call_list (promote_type (class_type),
1142 build_address_of (soft_multianewarray_node),
1143 args));
1144 }
1145
/* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
   ARRAY is an array type.  May expand some bound checking and NULL
   pointer checking.  RHS_TYPE_NODE is the type we are going to store.
   In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off the
   stack is an INT.  In those cases, we make the conversion.

   If ARRAY is a reference type, the assignment is checked at run-time
   to make sure that the RHS can be assigned to the array element
   type.  It is not necessary to generate this code if ARRAY is final.  */

static void
expand_java_arraystore (tree rhs_type_node)
{
  /* Sub-int integral values live on the JVM stack as ints, so pop an
     int in that case; the store narrows it back.  */
  tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
			      && TYPE_PRECISION (rhs_type_node) <= 32) ?
			     int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  /* INDEX and ARRAY are each used more than once below; SAVE_EXPR
     guarantees single evaluation.  */
  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  /* Only reference-typed stores need the runtime assignability
     check.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  /* Store through the saved element address.  */
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}
1211
1212 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1213 sure that LHS is an array type. May expand some bound checking and NULL
1214 pointer checking.
1215 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1216 BOOLEAN/SHORT, we push a promoted type back to the stack.
1217 */
1218
1219 static void
1220 expand_java_arrayload (tree lhs_type_node)
1221 {
1222 tree load_node;
1223 tree index_node = pop_value (int_type_node);
1224 tree array_type;
1225 tree array_node;
1226
1227 /* If we're processing an `aaload' we might as well just pick
1228 `Object'. */
1229 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1230 {
1231 array_type = build_java_array_type (object_ptr_type_node, -1);
1232 lhs_type_node = object_ptr_type_node;
1233 }
1234 else
1235 array_type = build_java_array_type (lhs_type_node, -1);
1236 array_node = pop_value (array_type);
1237 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1238
1239 index_node = save_expr (index_node);
1240 array_node = save_expr (array_node);
1241
1242 lhs_type_node = build_java_check_indexed_type (array_node,
1243 lhs_type_node);
1244 load_node = build_java_arrayaccess (array_node,
1245 lhs_type_node,
1246 index_node);
1247 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1248 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1249 push_value (load_node);
1250 }
1251
1252 /* Expands .length. Makes sure that we deal with and array and may expand
1253 a NULL check on the array object. */
1254
1255 static void
1256 expand_java_array_length (void)
1257 {
1258 tree array = pop_value (ptr_type_node);
1259 tree length = build_java_array_length_access (array);
1260
1261 push_value (length);
1262 }
1263
1264 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1265 either soft_monitorenter_node or soft_monitorexit_node. */
1266
1267 static tree
1268 build_java_monitor (tree call, tree object)
1269 {
1270 return build_call_nary (void_type_node,
1271 build_address_of (call),
1272 1, object);
1273 }
1274
1275 /* Emit code for one of the PUSHC instructions. */
1276
1277 static void
1278 expand_java_pushc (int ival, tree type)
1279 {
1280 tree value;
1281 if (type == ptr_type_node && ival == 0)
1282 value = null_pointer_node;
1283 else if (type == int_type_node || type == long_type_node)
1284 value = build_int_cst (type, ival);
1285 else if (type == float_type_node || type == double_type_node)
1286 {
1287 REAL_VALUE_TYPE x;
1288 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1289 value = build_real (type, x);
1290 }
1291 else
1292 gcc_unreachable ();
1293
1294 push_value (value);
1295 }
1296
/* Emit a return statement for the current method.  TYPE is the
   method's declared return type; for void methods nothing is popped
   from the stack.  */

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      /* RETURN_EXPR carries the assignment to DECL_RESULT.  */
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer. It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false.  */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1(NOP_EXPR, TREE_TYPE(res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
    }
}
1321
1322 static void
1323 expand_load_internal (int index, tree type, int pc)
1324 {
1325 tree copy;
1326 tree var = find_local_variable (index, type, pc);
1327
1328 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1329 on the stack. If there is an assignment to this VAR_DECL between
1330 the stack push and the use, then the wrong code could be
1331 generated. To avoid this we create a new local and copy our
1332 value into it. Then we push this new local on the stack.
1333 Hopefully this all gets optimized out. */
1334 copy = build_decl (VAR_DECL, NULL_TREE, type);
1335 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1336 && TREE_TYPE (copy) != TREE_TYPE (var))
1337 var = convert (type, var);
1338 java_add_local_var (copy);
1339 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1340
1341 push_value (copy);
1342 }
1343
1344 tree
1345 build_address_of (tree value)
1346 {
1347 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1348 }
1349
1350 bool
1351 class_has_finalize_method (tree type)
1352 {
1353 tree super = CLASSTYPE_SUPER (type);
1354
1355 if (super == NULL_TREE)
1356 return false; /* Every class with a real finalizer inherits */
1357 /* from java.lang.Object. */
1358 else
1359 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1360 }
1361
1362 tree
1363 java_create_object (tree type)
1364 {
1365 tree alloc_node = (class_has_finalize_method (type)
1366 ? alloc_object_node
1367 : alloc_no_finalizer_node);
1368
1369 return build_call_nary (promote_type (type),
1370 build_address_of (alloc_node),
1371 1, build_class_ref (type));
1372 }
1373
1374 static void
1375 expand_java_NEW (tree type)
1376 {
1377 tree alloc_node;
1378
1379 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1380 : alloc_no_finalizer_node);
1381 if (! CLASS_LOADED_P (type))
1382 load_class (type, 1);
1383 safe_layout_class (type);
1384 push_value (build_call_nary (promote_type (type),
1385 build_address_of (alloc_node),
1386 1, build_class_ref (type)));
1387 }
1388
1389 /* This returns an expression which will extract the class of an
1390 object. */
1391
1392 tree
1393 build_get_class (tree value)
1394 {
1395 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1396 tree vtable_field = lookup_field (&object_type_node,
1397 get_identifier ("vtable"));
1398 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1399 build_java_indirect_ref (object_type_node, value,
1400 flag_check_references),
1401 vtable_field, NULL_TREE);
1402 return build3 (COMPONENT_REF, class_ptr_type,
1403 build1 (INDIRECT_REF, dtable_type, tmp),
1404 class_field, NULL_TREE);
1405 }
1406
/* This builds the tree representation of the `instanceof' operator.
   It tries various tricks to optimize this in cases where types are
   known.  */

tree
build_instanceof (tree value, tree type)
{
  tree expr;
  /* Result type of the runtime helper (the operator's value type).  */
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      /* TYPE is final, so an exact class comparison decides:
	 VALUE != null && VALUE's class == TYPE's class.  */
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      /* General case: call the runtime helper.  */
      expr = build_call_nary (itype,
			      build_address_of (soft_instanceof_node),
			      2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
1472
1473 static void
1474 expand_java_INSTANCEOF (tree type)
1475 {
1476 tree value = pop_value (object_ptr_type_node);
1477 value = build_instanceof (value, type);
1478 push_value (value);
1479 }
1480
1481 static void
1482 expand_java_CHECKCAST (tree type)
1483 {
1484 tree value = pop_value (ptr_type_node);
1485 value = build_call_nary (promote_type (type),
1486 build_address_of (soft_checkcast_node),
1487 2, build_class_ref (type), value);
1488 push_value (value);
1489 }
1490
1491 static void
1492 expand_iinc (unsigned int local_var_index, int ival, int pc)
1493 {
1494 tree local_var, res;
1495 tree constant_value;
1496
1497 flush_quick_stack ();
1498 local_var = find_local_variable (local_var_index, int_type_node, pc);
1499 constant_value = build_int_cst (NULL_TREE, ival);
1500 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1501 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1502 }
1503
1504
1505 tree
1506 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1507 {
1508 tree call = NULL;
1509 tree arg1 = convert (type, op1);
1510 tree arg2 = convert (type, op2);
1511
1512 if (type == int_type_node)
1513 {
1514 switch (op)
1515 {
1516 case TRUNC_DIV_EXPR:
1517 call = soft_idiv_node;
1518 break;
1519 case TRUNC_MOD_EXPR:
1520 call = soft_irem_node;
1521 break;
1522 default:
1523 break;
1524 }
1525 }
1526 else if (type == long_type_node)
1527 {
1528 switch (op)
1529 {
1530 case TRUNC_DIV_EXPR:
1531 call = soft_ldiv_node;
1532 break;
1533 case TRUNC_MOD_EXPR:
1534 call = soft_lrem_node;
1535 break;
1536 default:
1537 break;
1538 }
1539 }
1540
1541 gcc_assert (call);
1542 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1543 return call;
1544 }
1545
/* Build an expression implementing Java binary operation OP on ARG1
   and ARG2, with result type TYPE.  Most operations map directly onto
   a GENERIC tree code; the Java-specific cases (unsigned right shift,
   shift-count masking, the three-way compare opcodes, floating-point
   remainder, and the software divide subroutines) are handled
   explicitly below.  */

tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	/* Java's >>> : do an ordinary right shift on the unsigned
	   variant of TYPE, then convert back.  */
	tree u_type = unsigned_type_for (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* The JVM takes shift counts modulo the operand width; mask the
	 count, then fall through to the generic build below.  */
      mask = build_int_cst (NULL_TREE,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      /* Each operand is evaluated twice; SAVE_EXPR guarantees single
	 evaluation.  The _L/_G variants differ in how an unordered
	 (NaN) comparison falls out.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      /* lcmp: arg1 < arg2 ? -1 : arg1 > arg2 ? 1 : 0.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  /* Floating-point remainder goes through the runtime fmod
	     helper, always computed in double precision.  */
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build_call_nary (double_type_node,
				  build_address_of (soft_fmod_node),
				  2, arg1, arg2);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      /* Integer div/rem may be routed through a runtime subroutine
	 when -fuse-divide-subroutine is in effect.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  return fold_build2 (op, type, arg1, arg2);
}
1623
1624 static void
1625 expand_java_binop (tree type, enum tree_code op)
1626 {
1627 tree larg, rarg;
1628 tree ltype = type;
1629 tree rtype = type;
1630 switch (op)
1631 {
1632 case LSHIFT_EXPR:
1633 case RSHIFT_EXPR:
1634 case URSHIFT_EXPR:
1635 rtype = int_type_node;
1636 rarg = pop_value (rtype);
1637 break;
1638 default:
1639 rarg = pop_value (rtype);
1640 }
1641 larg = pop_value (ltype);
1642 push_value (build_java_binop (op, type, larg, rarg));
1643 }
1644
/* Lookup the field named NAME in *TYPEP or its super classes.
   If not found, return NULL_TREE.
   (If the *TYPEP is not found, or if the field reference is
   ambiguous, return error_mark_node.)
   If found, return the FIELD_DECL, and set *TYPEP to the
   class containing the field.  */

tree
lookup_field (tree *typep, tree name)
{
  /* Make sure the class is loaded and laid out before walking its
     fields.  */
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      /* Direct fields of *TYPEP take precedence over interface
	 fields.  */
      for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces.  A field inherited through
	 two different interfaces is fine only if it is the very same
	 FIELD_DECL; two distinct decls make the reference
	 ambiguous.  */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      /* Not here: continue the search in the superclass.  */
      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}
1704
/* Look up the field named NAME in object SELF_VALUE,
   which has class SELF_CLASS (a non-handle RECORD_TYPE).
   SELF_VALUE is NULL_TREE if looking for a static field.
   Returns error_mark_node if the field is not found.  */

tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      /* Static field: no instance involved.  */
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  /* Indirect dispatch: the field offset is read from the
	     otable at runtime rather than compiled in.  */
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  /* For fields of another class a zero otable entry means the
	     field no longer exists: throw NoSuchFieldError.  */
	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build_call_nary (void_type_node,
					 build_address_of (soft_nosuchfield_node),
					 1, otable_index),
			field_offset);

	  /* Compute *(TYPE *)((char *)self + offset).  */
	  field_offset = fold (convert (sizetype, field_offset));
	  self_value = java_check_reference (self_value, check);
	  address
	    = fold_build2 (POINTER_PLUS_EXPR,
			   TREE_TYPE (self_value),
			   self_value, field_offset);
	  address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
				  address);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      /* Direct dispatch: an ordinary COMPONENT_REF, with an optional
	 null check on the instance.  */
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}
1789
1790 tree
1791 lookup_label (int pc)
1792 {
1793 tree name;
1794 char buf[32];
1795 if (pc > highest_label_pc_this_method)
1796 highest_label_pc_this_method = pc;
1797 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1798 name = get_identifier (buf);
1799 if (IDENTIFIER_LOCAL_VALUE (name))
1800 return IDENTIFIER_LOCAL_VALUE (name);
1801 else
1802 {
1803 /* The type of the address of a label is return_address_type_node. */
1804 tree decl = create_label_decl (name);
1805 return pushdecl (decl);
1806 }
1807 }
1808
1809 /* Generate a unique name for the purpose of loops and switches
1810 labels, and try-catch-finally blocks label or temporary variables. */
1811
1812 tree
1813 generate_name (void)
1814 {
1815 static int l_number = 0;
1816 char buff [32];
1817 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1818 l_number++;
1819 return get_identifier (buff);
1820 }
1821
1822 tree
1823 create_label_decl (tree name)
1824 {
1825 tree decl;
1826 decl = build_decl (LABEL_DECL, name,
1827 TREE_TYPE (return_address_type_node));
1828 DECL_CONTEXT (decl) = current_function_decl;
1829 DECL_IGNORED_P (decl) = 1;
1830 return decl;
1831 }
1832
1833 /* This maps a bytecode offset (PC) to various flags. */
1834 char *instruction_bits;
1835
1836 /* This is a vector of type states for the current method. It is
1837 indexed by PC. Each element is a tree vector holding the type
1838 state at that PC. We only note type states at basic block
1839 boundaries. */
1840 VEC(tree, gc) *type_states;
1841
1842 static void
1843 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1844 {
1845 lookup_label (target_pc);
1846 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1847 }
1848
1849 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1850 where CONDITION is one of one the compare operators. */
1851
1852 static void
1853 expand_compare (enum tree_code condition, tree value1, tree value2,
1854 int target_pc)
1855 {
1856 tree target = lookup_label (target_pc);
1857 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1858 java_add_stmt
1859 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1860 build1 (GOTO_EXPR, void_type_node, target),
1861 build_java_empty_stmt ()));
1862 }
1863
1864 /* Emit code for a TEST-type opcode. */
1865
1866 static void
1867 expand_test (enum tree_code condition, tree type, int target_pc)
1868 {
1869 tree value1, value2;
1870 flush_quick_stack ();
1871 value1 = pop_value (type);
1872 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1873 expand_compare (condition, value1, value2, target_pc);
1874 }
1875
1876 /* Emit code for a COND-type opcode. */
1877
1878 static void
1879 expand_cond (enum tree_code condition, tree type, int target_pc)
1880 {
1881 tree value1, value2;
1882 flush_quick_stack ();
1883 /* note: pop values in opposite order */
1884 value2 = pop_value (type);
1885 value1 = pop_value (type);
1886 /* Maybe should check value1 and value2 for type compatibility ??? */
1887 expand_compare (condition, value1, value2, target_pc);
1888 }
1889
1890 static void
1891 expand_java_goto (int target_pc)
1892 {
1893 tree target_label = lookup_label (target_pc);
1894 flush_quick_stack ();
1895 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1896 }
1897
1898 static tree
1899 expand_java_switch (tree selector, int default_pc)
1900 {
1901 tree switch_expr, x;
1902
1903 flush_quick_stack ();
1904 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1905 NULL_TREE, NULL_TREE);
1906 java_add_stmt (switch_expr);
1907
1908 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1909 create_artificial_label ());
1910 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1911
1912 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1913 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1914
1915 return switch_expr;
1916 }
1917
1918 static void
1919 expand_java_add_case (tree switch_expr, int match, int target_pc)
1920 {
1921 tree value, x;
1922
1923 value = build_int_cst (TREE_TYPE (switch_expr), match);
1924
1925 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1926 create_artificial_label ());
1927 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1928
1929 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1930 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1931 }
1932
1933 static tree
1934 pop_arguments (tree arg_types)
1935 {
1936 if (arg_types == end_params_node)
1937 return NULL_TREE;
1938 if (TREE_CODE (arg_types) == TREE_LIST)
1939 {
1940 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1941 tree type = TREE_VALUE (arg_types);
1942 tree arg = pop_value (type);
1943
1944 /* We simply cast each argument to its proper type. This is
1945 needed since we lose type information coming out of the
1946 verifier. We also have to do this when we pop an integer
1947 type that must be promoted for the function call. */
1948 if (TREE_CODE (type) == POINTER_TYPE)
1949 arg = build1 (NOP_EXPR, type, arg);
1950 else if (targetm.calls.promote_prototypes (type)
1951 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1952 && INTEGRAL_TYPE_P (type))
1953 arg = convert (integer_type_node, arg);
1954 return tree_cons (NULL_TREE, arg, tail);
1955 }
1956 gcc_unreachable ();
1957 }
1958
/* Attach to PTR (a block) the declaration found in ENTRY.  This is a
   hash-table traversal callback: ENTRY holds a treetreehash_entry
   whose `value' is the init-test flag VAR_DECL to be declared in the
   block.  Always returns true so traversal continues.  */

int
attach_init_test_initialization_flags (void **entry, void *ptr)
{
  tree block = (tree)ptr;
  struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;

  if (block != error_mark_node)
    {
      if (TREE_CODE (block) == BIND_EXPR)
        {
	  /* Chain the flag decl onto the BIND_EXPR's variable list and
	     prepend a DECL_EXPR for it to the body.  */
	  tree body = BIND_EXPR_BODY (block);
	  TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
	  BIND_EXPR_VARS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BIND_EXPR_BODY (block) = body;
	}
      else
	{
	  /* Same, but for a BLOCK node.  */
	  tree body = BLOCK_SUBBLOCKS (block);
	  TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
	  BLOCK_EXPR_DECLS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BLOCK_SUBBLOCKS (block) = body;
	}
      
    }
  return true;
}
1991
/* Build an expression to initialize the class CLAS.
   If EXPR is non-NULL, returns an expression to first call the initializer
   (if it is needed) and then calls EXPR.  */

tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      /* Unconditional call to the runtime initializer.  */
      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      /* Guard the call with a per-function boolean flag so each class
	 is initialized at most once per method body.  */
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations.  */
	  decl = build_decl (VAR_DECL, NULL_TREE,
			     boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
	  DECL_CONTEXT (decl) = current_function_decl;
	  DECL_INITIAL (decl) = boolean_false_node;
	  /* Don't emit any symbolic debugging info for this decl.  */
	  DECL_IGNORED_P (decl) = 1;	  
	  *init_test_decl = decl;
	}

      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
      /* if (flag == false) init_class (clas);  */
      init = build3 (COND_EXPR, void_type_node,
		     build2 (EQ_EXPR, boolean_type_node,
			     *init_test_decl, boolean_false_node),
		     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      /* ... then set the flag.  NOTE(review): TREE_TYPE (expr) is
	 read here even when EXPR is NULL_TREE -- presumably callers
	 in this mode always pass a non-null EXPR; confirm.  */
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
		     build2 (MODIFY_EXPR, boolean_type_node,
			     *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      /* Sequence the initialization before EXPR.  */
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}
2061
2062 \f
2063
2064 /* Rewrite expensive calls that require stack unwinding at runtime to
2065 cheaper alternatives. The logic here performs these
2066 transformations:
2067
2068 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2069 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2070
2071 */
2072
/* One entry in the call-rewriting table (see rules[] below): matches a
   method by class, name and signature, and describes its cheaper
   replacement.  */
typedef struct
{
  const char *classname;     /* Fully-qualified (dotted) class name.  */
  const char *method;        /* Method name to match.  */
  const char *signature;     /* Original JVM-style method signature.  */
  const char *new_signature; /* Signature of the replacement method.  */
  int flags;                 /* ACC_* flags for the replacement method.  */
  /* Hook that appends the extra argument(s) the replacement needs.  */
  tree (*rewrite_arglist) (tree arglist);
} rewrite_rule;
2082
2083 /* Add __builtin_return_address(0) to the end of an arglist. */
2084
2085
2086 static tree
2087 rewrite_arglist_getcaller (tree arglist)
2088 {
2089 tree retaddr
2090 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2091 1, integer_zero_node);
2092
2093 DECL_INLINE (current_function_decl) = 0;
2094
2095 return chainon (arglist,
2096 tree_cons (NULL_TREE, retaddr,
2097 NULL_TREE));
2098 }
2099
2100 /* Add this.class to the end of an arglist. */
2101
2102 static tree
2103 rewrite_arglist_getclass (tree arglist)
2104 {
2105 return chainon (arglist,
2106 tree_cons (NULL_TREE, build_class_ref (output_class),
2107 NULL_TREE));
2108 }
2109
/* The table of call-rewriting rules, terminated by an entry whose
   classname is NULL.  Each rewritten call gains an extra trailing
   argument (the calling class or the caller's return address) supplied
   by the entry's rewrite_arglist hook.  */
static rewrite_rule rules[] =
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   /* Terminator.  */
   {NULL, NULL, NULL, NULL, 0, NULL}};
2126
2127 /* True if this method is special, i.e. it's a private method that
2128 should be exported from a DSO. */
2129
2130 bool
2131 special_method_p (tree candidate_method)
2132 {
2133 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2134 tree method = DECL_NAME (candidate_method);
2135 rewrite_rule *p;
2136
2137 for (p = rules; p->classname; p++)
2138 {
2139 if (get_identifier (p->classname) == context
2140 && get_identifier (p->method) == method)
2141 return true;
2142 }
2143 return false;
2144 }
2145
2146 /* Scan the rules list for replacements for *METHOD_P and replace the
2147 args accordingly. If the rewrite results in an access to a private
2148 method, update SPECIAL.*/
2149
2150 void
2151 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2152 tree *method_signature_p, tree *special)
2153 {
2154 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2155 rewrite_rule *p;
2156 *special = NULL_TREE;
2157
2158 for (p = rules; p->classname; p++)
2159 {
2160 if (get_identifier (p->classname) == context)
2161 {
2162 tree method = DECL_NAME (*method_p);
2163 if (get_identifier (p->method) == method
2164 && get_identifier (p->signature) == *method_signature_p)
2165 {
2166 tree maybe_method
2167 = lookup_java_method (DECL_CONTEXT (*method_p),
2168 method,
2169 get_identifier (p->new_signature));
2170 if (! maybe_method && ! flag_verify_invocations)
2171 {
2172 maybe_method
2173 = add_method (DECL_CONTEXT (*method_p), p->flags,
2174 method, get_identifier (p->new_signature));
2175 DECL_EXTERNAL (maybe_method) = 1;
2176 }
2177 *method_p = maybe_method;
2178 gcc_assert (*method_p);
2179 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2180 *method_signature_p = get_identifier (p->new_signature);
2181 *special = integer_one_node;
2182
2183 break;
2184 }
2185 }
2186 }
2187 }
2188
2189 \f
2190
/* Build an expression that evaluates to the address of the code of
   METHOD, a statically-resolvable callee named through SELF_TYPE.
   SPECIAL is forwarded to the atable machinery for rewritten
   "special" methods.  */

tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
                        tree self_type, tree method_signature ATTRIBUTE_UNUSED,
                        tree arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
         publicly visible methods or gcc will use PLT indirections
         to reach them.  We also have to use indirect dispatch for all
         external methods.  */
      if (! flag_indirect_dispatch
          || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
        {
          /* Direct call: simply take the method's address.  */
          func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
                         method);
        }
      else
        {
          /* Indirect dispatch: load the target out of the current
             class's atable at a symbol-table-assigned slot.  */
          tree table_index
            = build_int_cst (NULL_TREE,
                             (get_symbol_table_index
                              (method, special,
                               &TYPE_ATABLE_METHODS (output_class))));
          func
            = build4 (ARRAY_REF,
                      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
                      TYPE_ATABLE_DECL (output_class), table_index,
                      NULL_TREE, NULL_TREE);
        }
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
         Compile this code to get a reference to the method's code:

         SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
         we have to use its class context, not the caller's notion of
         where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      /* Lazily intern the field identifiers used below.  */
      if (ncode_ident == NULL_TREE)
        ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
        methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
                    lookup_field (&class_type_node, methods_ident),
                    NULL_TREE);
      /* Walk the declaring class's method list to determine METHOD's
         positional index; a miss is a fatal internal error.  */
      for (meth = TYPE_METHODS (self_type);
           ; meth = TREE_CHAIN (meth))
        {
          if (method == meth)
            break;
          if (meth == NULL_TREE)
            fatal_error ("method '%s' not found in class",
                         IDENTIFIER_POINTER (DECL_NAME (method)));
          method_index++;
        }
      /* Scale the element index into a byte offset into the methods
         array, then read the `ncode' field of that entry.  */
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
                         ref, size_int (method_index));
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
                     ref, lookup_field (&method_type_node, ncode_ident),
                     NULL_TREE);
    }
  return func;
}
2269
2270 tree
2271 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2272 {
2273 tree dtable, objectref;
2274
2275 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2276
2277 /* If we're dealing with interfaces and if the objectref
2278 argument is an array then get the dispatch table of the class
2279 Object rather than the one from the objectref. */
2280 objectref = (is_invoke_interface
2281 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2282 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2283
2284 if (dtable_ident == NULL_TREE)
2285 dtable_ident = get_identifier ("vtable");
2286 dtable = build_java_indirect_ref (object_type_node, objectref,
2287 flag_check_references);
2288 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2289 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2290
2291 return dtable;
2292 }
2293
2294 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2295 T. If this decl has not been seen before, it will be added to the
2296 [oa]table_methods. If it has, the existing table slot will be
2297 reused. */
2298
2299 int
2300 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2301 {
2302 int i = 1;
2303 tree method_list;
2304
2305 if (*symbol_table == NULL_TREE)
2306 {
2307 *symbol_table = build_tree_list (special, t);
2308 return 1;
2309 }
2310
2311 method_list = *symbol_table;
2312
2313 while (1)
2314 {
2315 tree value = TREE_VALUE (method_list);
2316 tree purpose = TREE_PURPOSE (method_list);
2317 if (value == t && purpose == special)
2318 return i;
2319 i++;
2320 if (TREE_CHAIN (method_list) == NULL_TREE)
2321 break;
2322 else
2323 method_list = TREE_CHAIN (method_list);
2324 }
2325
2326 TREE_CHAIN (method_list) = build_tree_list (special, t);
2327 return i;
2328 }
2329
/* Build the callee expression for a virtual call to METHOD through
   the vtable expression DTABLE.  SPECIAL is forwarded to the otable
   machinery when indirect dispatch is in use.  */

tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      /* Interface methods never come through here (they are handled
         by build_invokeinterface).  */
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      /* The vtable offset is read at run time from the otable entry
         assigned to this method.  */
      otable_index
        = build_int_cst (NULL_TREE, get_symbol_table_index
                         (method, special,
                          &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
                             TYPE_OTABLE_DECL (output_class),
                             otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
         using get_method_index().  DECL_VINDEX is the true offset
         from the vtable base to a method, regardless of any extra
         words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      /* Scale the slot number by the size of a vtable entry (and by
         the per-entry descriptor count on descriptor targets).  */
      method_index = size_binop (MULT_EXPR, method_index,
                                 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      if (TARGET_VTABLE_USES_DESCRIPTORS)
        method_index = size_binop (MULT_EXPR, method_index,
                                   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  /* Address of the selected vtable slot.  */
  func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
                      convert (sizetype, method_index));

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    /* On descriptor targets the slot address itself is the callee.  */
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    {
      /* Otherwise load the code pointer out of the slot.  */
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
    }

  return func;
}
2378
2379 static GTY(()) tree class_ident;
/* Build the callee expression for an interface call to METHOD through
   the vtable expression DTABLE.  The actual target is resolved at run
   time via the soft_lookupinterfacemethod_node helper.  */

tree
build_invokeinterface (tree dtable, tree method)
{
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  /* Fetch the receiver's Class object out of its vtable.  */
  dtable = build_java_indirect_ref (dtable_type, dtable,
                                    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
                   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      /* The itable holds pairs of entries: slot itable_index-1 is the
         interface and slot itable_index is the method index within
         it.  */
      int itable_index
        = 2 * (get_symbol_table_index
               (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index-1),
                  NULL_TREE, NULL_TREE);
      idx
        = build4 (ARRAY_REF,
                  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
                  TYPE_ITABLE_DECL (output_class),
                  build_int_cst (NULL_TREE, itable_index),
                  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      /* Direct dispatch: the interface and the method's index within
         it are both known at compile time.  */
      idx = build_int_cst (NULL_TREE,
                           get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  return build_call_nary (ptr_type_node,
                          build_address_of (soft_lookupinterfacemethod_node),
                          3, dtable, interface, idx);
}
2431
/* Expand one of the invoke_* opcodes.
   OPCODE is the specific opcode.
   METHOD_REF_INDEX is an index into the constant pool.
   NARGS is the number of arguments, or -1 if not specified.  */

static void
expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
{
  /* Pull the callee's signature, name and declaring class out of the
     constant pool.  */
  tree method_signature
    = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
  tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
                                         method_ref_index);
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
                                                    method_ref_index));
  const char *const self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree call, func, method, arg_list, method_type;
  /* Null check for the receiver, built only for the direct-call
     cases below.  */
  tree check = NULL_TREE;

  tree special = NULL_TREE;

  if (! CLASS_LOADED_P (self_type))
    {
      load_class (self_type, 1);
      safe_layout_class (self_type);
      if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
        fatal_error ("failed to find class '%s'", self_name);
    }
  layout_class_methods (self_type);

  /* Constructors are looked up by signature only.  */
  if (ID_INIT_P (method_name))
    method = lookup_java_constructor (self_type, method_signature);
  else
    method = lookup_java_method (self_type, method_name, method_signature);

  /* We've found a method in a class other than the one in which it
     was wanted.  This can happen if, for instance, we're trying to
     compile invokespecial super.equals().
     FIXME: This is a kludge.  Rather than nullifying the result, we
     should change lookup_java_method() so that it doesn't search the
     superclass chain when we're BC-compiling.  */
  if (! flag_verify_invocations
      && method
      && ! TYPE_ARRAY_P (self_type)
      && self_type != DECL_CONTEXT (method))
    method = NULL_TREE;

  /* We've found a method in an interface, but this isn't an interface
     call.  */
  if (opcode != OPCODE_invokeinterface
      && method
      && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
    method = NULL_TREE;

  /* We've found a non-interface method but we are making an
     interface call.  This can happen if the interface overrides a
     method in Object.  */
  if (! flag_verify_invocations
      && opcode == OPCODE_invokeinterface
      && method
      && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
    method = NULL_TREE;

  if (method == NULL_TREE)
    {
      if (flag_verify_invocations || ! flag_indirect_dispatch)
        {
          error ("class '%s' has no method named '%s' matching signature '%s'",
                 self_name,
                 IDENTIFIER_POINTER (method_name),
                 IDENTIFIER_POINTER (method_signature));
        }
      else
        {
          /* BC-compiling without verification: fabricate a dummy
             method decl so code generation can proceed; resolution
             happens at run time.  */
          int flags = ACC_PUBLIC;
          if (opcode == OPCODE_invokestatic)
            flags |= ACC_STATIC;
          if (opcode == OPCODE_invokeinterface)
            {
              flags |= ACC_INTERFACE | ACC_ABSTRACT;
              CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
            }
          method = add_method (self_type, flags, method_name,
                               method_signature);
          DECL_ARTIFICIAL (method) = 1;
          METHOD_DUMMY (method) = 1;
          layout_class_method (self_type, NULL,
                               method, NULL);
        }
    }

  /* Invoke static can't invoke static/abstract method */
  if (method != NULL_TREE)
    {
      if (opcode == OPCODE_invokestatic)
        {
          if (!METHOD_STATIC (method))
            {
              error ("invokestatic on non static method");
              method = NULL_TREE;
            }
          else if (METHOD_ABSTRACT (method))
            {
              error ("invokestatic on abstract method");
              method = NULL_TREE;
            }
        }
      else
        {
          if (METHOD_STATIC (method))
            {
              error ("invoke[non-static] on static method");
              method = NULL_TREE;
            }
        }
    }

  if (method == NULL_TREE)
    {
      /* If we got here, we emitted an error message above.  So we
         just pop the arguments, push a properly-typed zero, and
         continue.  */
      method_type = get_type_from_signature (method_signature);
      pop_arguments (TYPE_ARG_TYPES (method_type));
      if (opcode != OPCODE_invokestatic)
        pop_type (self_type);
      method_type = promote_type (TREE_TYPE (method_type));
      push_value (convert (method_type, integer_zero_node));
      return;
    }

  method_type = TREE_TYPE (method);
  arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
  flush_quick_stack ();

  /* Possibly replace expensive stack-walking calls with the cheaper
     forms described by the rules[] table above.  */
  maybe_rewrite_invocation (&method, &arg_list, &method_signature,
                            &special);

  func = NULL_TREE;
  if (opcode == OPCODE_invokestatic)
    func = build_known_method_ref (method, method_type, self_type,
                                   method_signature, arg_list, special);
  else if (opcode == OPCODE_invokespecial
           || (opcode == OPCODE_invokevirtual
               && (METHOD_PRIVATE (method)
                   || METHOD_FINAL (method)
                   || CLASS_FINAL (TYPE_NAME (self_type)))))
    {
      /* If the object for the method call is null, we throw an
         exception.  We don't do this if the object is the current
         method's `this'.  In other cases we just rely on an
         optimization pass to eliminate redundant checks.  FIXME:
         Unfortunately there doesn't seem to be a way to determine
         what the current method is right now.
         We do omit the check if we're calling <init>.  */
      /* We use a SAVE_EXPR here to make sure we only evaluate
         the new `self' expression once.  */
      tree save_arg = save_expr (TREE_VALUE (arg_list));
      TREE_VALUE (arg_list) = save_arg;
      check = java_check_reference (save_arg, ! DECL_INIT_P (method));
      func = build_known_method_ref (method, method_type, self_type,
                                     method_signature, arg_list, special);
    }
  else
    {
      /* Truly dynamic dispatch: go through the receiver's vtable (or
         the interface lookup helper).  */
      tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
                                         arg_list);
      if (opcode == OPCODE_invokevirtual)
        func = build_invokevirtual (dtable, method, special);
      else
        func = build_invokeinterface (dtable, method);
    }

  if (TREE_CODE (func) == ADDR_EXPR)
    TREE_TYPE (func) = build_pointer_type (method_type);
  else
    func = build1 (NOP_EXPR, build_pointer_type (method_type), func);

  call = build_call_list (TREE_TYPE (method_type), func, arg_list);
  TREE_SIDE_EFFECTS (call) = 1;
  call = check_for_builtin (method, call);

  if (check != NULL_TREE)
    {
      /* Sequence the null check before the call itself.  */
      call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
      TREE_SIDE_EFFECTS (call) = 1;
    }

  /* A void call becomes a statement; otherwise the result goes on the
     quick stack.  */
  if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
    java_add_stmt (call);
  else
    {
      push_value (call);
      flush_quick_stack ();
    }
}
2630
/* Create a stub which will be put into the vtable but which will call
   a JNI function.  METHOD is the native method the stub stands in
   for; the returned tree is a BIND_EXPR forming the stub's body.  */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, args, body, method_sig, arg_types;
  tree jniarg0, jniarg1, jniarg2, jniarg3;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args, res_type;
  tree meth_var;
  tree bind;

  /* Cumulative size in bytes of the arguments passed to the JNI
     function; handed to the runtime's method lookup below.  */
  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  /* Local holding the JNIEnv pointer for this frame.  */
  env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      /* Local capturing the JNI function's return value.  */
      res_var = build_decl (VAR_DECL, get_identifier ("res"),
                            TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      TREE_CHAIN (env_var) = res_var;
    }

  method_args = DECL_ARGUMENTS (method);
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;
  TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
                 build_call_nary (ptr_type_node,
                                  build_address_of (soft_getjnienvnewframe_node),
                                  1, klass));

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  args = NULL_TREE;
  for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
    {
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      /* Round each argument up to the parameter-passing boundary.  */
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
                  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      args = tree_cons (NULL_TREE, tem, args);
    }
  args = nreverse (args);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      args = tree_cons (NULL_TREE, klass, args);
      arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
    }

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  args = tree_cons (NULL_TREE, env_var, args);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  jniarg0 = klass;
  jniarg1 = build_utf8_ref (DECL_NAME (method));
  jniarg2 = build_utf8_ref (unmangle_classname
                            (IDENTIFIER_POINTER (method_sig),
                             IDENTIFIER_LENGTH (method_sig)));
  jniarg3 = build_int_cst (NULL_TREE, args_size);

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  /* Use the actual function type, rather than a generic pointer type,
     such that this decl keeps the actual pointer type from being
     garbage-collected.  If it is, we end up using canonical types
     with different uids for equivalent function types, and this in
     turn causes utf8 identifiers and output order to vary.  */
  meth_var = build_decl (VAR_DECL, get_identifier ("meth"), jni_func_type);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* Cache the looked-up function pointer in `meth': only perform the
     lookup if the cache is still null.  */
  jnifunc = build3 (COND_EXPR, jni_func_type,
                    build2 (NE_EXPR, boolean_type_node,
                            meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
                    meth_var,
                    build2 (MODIFY_EXPR, jni_func_type, meth_var,
                            build1
                            (NOP_EXPR, jni_func_type,
                             build_call_nary (ptr_type_node,
                                              build_address_of
                                              (soft_lookupjnimethod_node),
                                              4,
                                              jniarg0, jniarg1,
                                              jniarg2, jniarg3))));

  /* Now we make the actual JNI call via the resulting function
     pointer. */
  call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
                          jnifunc, args);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
         reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
        call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
                                build_address_of (soft_unwrapjni_node),
                                1, call);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
                     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build_call_nary (ptr_type_node,
                          build_address_of (soft_jnipopsystemframe_node),
                          1, env_var);
  TREE_SIDE_EFFECTS (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  /* NOTE(review): res_type is assigned here but does not appear to be
     read anywhere below — looks vestigial; confirm before removing.  */
  res_type = void_type_node;
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
         result.  We use the type of the DECL_RESULT because it
         might be different from the return type of the function:
         it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
        res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
                 build1 (RETURN_EXPR, void_type_node, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  /* Prepend class initialization for static methods reachable from
     other classes.  */
  if (METHOD_STATIC (method)
      && (! METHOD_PRIVATE (method)
          || INNER_CLASS_P (DECL_CONTEXT (method))))
    {
      tree init = build_call_expr (soft_initclass_node, 1,
                                   klass);
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
      TREE_SIDE_EFFECTS (body) = 1;
    }

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
                 body, block);
  return bind;
}
2828
2829
2830 /* Given lvalue EXP, return a volatile expression that references the
2831 same object. */
2832
2833 tree
2834 java_modify_addr_for_volatile (tree exp)
2835 {
2836 tree exp_type = TREE_TYPE (exp);
2837 tree v_type
2838 = build_qualified_type (exp_type,
2839 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2840 tree addr = build_fold_addr_expr (exp);
2841 v_type = build_pointer_type (v_type);
2842 addr = fold_convert (v_type, addr);
2843 exp = build_fold_indirect_ref (addr);
2844 return exp;
2845 }
2846
2847
/* Expand an operation to extract from or store into a field.
   IS_STATIC is 1 iff the field is static.
   IS_PUTTING is 1 for putting into a field;  0 for getting from the field.
   FIELD_REF_INDEX is an index into the constant pool.  */

static void
expand_java_field_op (int is_static, int is_putting, int field_ref_index)
{
  tree self_type
    = get_class_constant (current_jcf,
                          COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
                                                     field_ref_index));
  const char *self_name
    = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
  tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
  tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
                                                  field_ref_index);
  tree field_type = get_type_from_signature (field_signature);
  /* For a put, the value to store is on top of the stack.  */
  tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
  tree field_ref;
  int is_error = 0;
  tree original_self_type = self_type;
  tree field_decl;
  tree modify_expr;

  if (! CLASS_LOADED_P (self_type))
    load_class (self_type, 1);
  field_decl = lookup_field (&self_type, field_name);
  if (field_decl == error_mark_node)
    {
      is_error = 1;
    }
  else if (field_decl == NULL_TREE)
    {
      if (! flag_verify_invocations)
        {
          /* BC-compiling: fabricate the missing field so code
             generation can proceed.  */
          int flags = ACC_PUBLIC;
          if (is_static)
            flags |= ACC_STATIC;
          self_type = original_self_type;
          field_decl = add_field (original_self_type, field_name,
                                  field_type, flags);
          DECL_ARTIFICIAL (field_decl) = 1;
          DECL_IGNORED_P (field_decl) = 1;
#if 0
          /* FIXME: We should be pessimistic about volatility.  We
             don't know one way or another, but this is safe.
             However, doing this has bad effects on code quality.  We
             need to look at better ways to do this.  */
          TREE_THIS_VOLATILE (field_decl) = 1;
#endif
        }
      else
        {
          error ("missing field '%s' in '%s'",
                 IDENTIFIER_POINTER (field_name), self_name);
          is_error = 1;
        }
    }
  else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
    {
      error ("mismatching signature for field '%s' in '%s'",
             IDENTIFIER_POINTER (field_name), self_name);
      is_error = 1;
    }
  /* For an instance field, the object reference is on the stack.
     Note this pop happens even on the error path, to keep the stack
     consistent.  */
  field_ref = is_static ? NULL_TREE : pop_value (self_type);
  if (is_error)
    {
      /* An error was reported above; push a typed zero for a get and
         bail out.  */
      if (! is_putting)
        push_value (convert (field_type, integer_zero_node));
      flush_quick_stack ();
      return;
    }

  field_ref = build_field_ref (field_ref, self_type, field_name);
  if (is_static
      && ! flag_indirect_dispatch)
    {
      /* Touching a static field must trigger initialization of the
         class (or interface) that declares it.  */
      tree context = DECL_CONTEXT (field_ref);
      if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
        field_ref = build_class_init (context, field_ref);
      else
        field_ref = build_class_init (self_type, field_ref);
    }
  if (is_putting)
    {
      flush_quick_stack ();
      if (FIELD_FINAL (field_decl))
        {
          if (DECL_CONTEXT (field_decl) != current_class)
            error ("assignment to final field %q+D not in field's class",
                   field_decl);
          /* We used to check for assignments to final fields not
             occurring in the class initializer or in a constructor
             here.  However, this constraint doesn't seem to be
             enforced by the JVM.  */
        }

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
                            field_ref, new_value);

      /* A volatile store is preceded by a full memory barrier.  */
      if (TREE_THIS_VOLATILE (field_decl))
        java_add_stmt
          (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));

      java_add_stmt (modify_expr);
    }
  else
    {
      /* Read the field into a temporary so the access happens exactly
         once, then push the temporary.  */
      tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
      java_add_local_var (temp);

      if (TREE_THIS_VOLATILE (field_decl))
        field_ref = java_modify_addr_for_volatile (field_ref);

      modify_expr
        = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
      java_add_stmt (modify_expr);

      /* A volatile load is followed by a full memory barrier.  */
      if (TREE_THIS_VOLATILE (field_decl))
        java_add_stmt
          (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));

      push_value (temp);
    }
  TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
}
2978
2979 static void
2980 load_type_state (int pc)
2981 {
2982 int i;
2983 tree vec = VEC_index (tree, type_states, pc);
2984 int cur_length = TREE_VEC_LENGTH (vec);
2985 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2986 for (i = 0; i < cur_length; i++)
2987 type_map [i] = TREE_VEC_ELT (vec, i);
2988 }
2989
2990 /* Go over METHOD's bytecode and note instruction starts in
2991 instruction_bits[]. */
2992
/* First pass over METHOD's bytecode in JCF: mark instruction starts
   and branch targets in instruction_bits[], and allocate/clear the
   type_states vector consumed later by load_type_state.  The PRE_*
   macros below give each javaop.def opcode kind just enough operand
   decoding to locate label targets; the IMMEDIATE_* readers are
   assumed to advance PC as a side effect (defined in javaop.h).  */
void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  /* Set by the CONST_INDEX_*/VAR_INDEX_* macros when an operand was an
     explicit constant-pool or variable index rather than a literal.  */
  int saw_index;
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef PTR
#define BCODE byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
/* Operand readers that additionally record that an index was seen.  */
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);
  type_states = VEC_alloc (tree, gc, length + 1);
  VEC_safe_grow_cleared (tree, gc, type_states, length + 1);

  /* This pass figures out which PC can be the targets of jumps. */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
	{
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
	  PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
	  break;

#define NOTE_LABEL(PC) note_label(oldpc, PC)

/* Kinds with no label targets: just consume the operand bytes.  */
#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc) \
      { \
	(void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
	(void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
	(void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
  PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

/* Branch-like kinds: record the destination PC as a label.  */
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0; INT_temp = (OPERAND_VALUE); \
  if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0; INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index) NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)

/* Switch operands start at a 4-byte-aligned PC.  */
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (npairs >= 0) \
      while (--npairs >= 0) { \
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc); \
    if (low <= high) \
      while (low++ <= high) { \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
	}
    } /* for */
}
3137
/* Translate METHOD's (verified) bytecode from JCF into trees, emitting
   labels at branch targets, replacing unverified (dead) regions with
   nops, and propagating source line numbers from the class file's
   LineNumberTable into input_location.  */
void
expand_byte_code (JCF *jcf, tree method)
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;   /* Start PC of the current dead region, or -1.  */
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);
  location_t max_location = input_location;

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries. */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;
      if (pc >= length)
	warning (0, "invalid PC in line number table");
      else
	{
	  /* A second entry for the same PC means multiple line numbers.  */
	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
	}
    }

  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();
  cache_this_class_ref (method);
  cache_cpool_data_ref ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
	{
	  tree label = lookup_label (PC);
          flush_quick_stack ();
	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
	    java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
	  if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	    load_type_state (PC);
	}

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
	{
	  if (dead_code_index == -1)
	    {
	      /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
	      dead_code_index = PC;
            }

          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
        }
      else
        {
	  if (dead_code_index != -1)
	    {
              /* We've just reached the end of a region of dead code.  */
	      if (extra_warnings)
		warning (0, "unreachable bytecode from %d to before %d",
			 dead_code_index, PC);
              dead_code_index = -1;
            }
	}

      /* Handle possible line number entry for this PC.

	 This code handles out-of-order and multiple linenumbers per PC,
	 but is optimized for the case of line numbers increasing
	 monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
	{
	  /* Rescan from the start only when necessary.  */
	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
	      || GET_u2 (linenumber_pointer) != PC)
	    linenumber_pointer = linenumber_table;
	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
	    {
	      int pc = GET_u2 (linenumber_pointer);
	      linenumber_pointer += 4;
	      if (pc == PC)
		{
		  int line = GET_u2 (linenumber_pointer - 2);
		  input_location = linemap_line_start (line_table, line, 1);
		  if (input_location > max_location)
		    max_location = input_location;
		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
		    break;
		}
	    }
	}
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  uncache_this_class_ref (method);

  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
	warning (0, "unreachable bytecode from %d to the end of the method",
		 dead_code_index);
    }

  DECL_FUNCTION_LAST_LINE (method) = max_location;
}
3259
3260 static void
3261 java_push_constant_from_pool (JCF *jcf, int index)
3262 {
3263 tree c;
3264 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3265 {
3266 tree name;
3267 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3268 index = alloc_name_constant (CONSTANT_String, name);
3269 c = build_ref_from_constant_pool (index);
3270 c = convert (promote_type (string_type_node), c);
3271 }
3272 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3273 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3274 {
3275 tree record = get_class_constant (jcf, index);
3276 c = build_class_ref (record);
3277 }
3278 else
3279 c = get_constant (jcf, index);
3280 push_value (c);
3281 }
3282
3283 int
3284 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3285 long length ATTRIBUTE_UNUSED)
3286 {
3287 const char *opname; /* Temporary ??? */
3288 int oldpc = PC; /* PC at instruction start. */
3289
3290 /* If the instruction is at the beginning of an exception handler,
3291 replace the top of the stack with the thrown object reference. */
3292 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3293 {
3294 /* Note that the verifier will not emit a type map at all for
3295 dead exception handlers. In this case we just ignore the
3296 situation. */
3297 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3298 {
3299 tree type = pop_type (promote_type (throwable_type_node));
3300 push_value (build_exception_object_ref (type));
3301 }
3302 }
3303
3304 switch (byte_ops[PC++])
3305 {
3306 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3307 case OPCODE: \
3308 opname = #OPNAME; \
3309 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3310 break;
3311
3312 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3313 { \
3314 int saw_index = 0; \
3315 int index = OPERAND_VALUE; \
3316 build_java_ret \
3317 (find_local_variable (index, return_address_type_node, oldpc)); \
3318 }
3319
3320 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3321 { \
3322 /* OPERAND_VALUE may have side-effects on PC */ \
3323 int opvalue = OPERAND_VALUE; \
3324 build_java_jsr (oldpc + opvalue, PC); \
3325 }
3326
3327 /* Push a constant onto the stack. */
3328 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3329 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3330 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3331 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3332
3333 /* internal macro added for use by the WIDE case */
3334 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3335 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3336
3337 /* Push local variable onto the opcode stack. */
3338 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3339 { \
3340 /* have to do this since OPERAND_VALUE may have side-effects */ \
3341 int opvalue = OPERAND_VALUE; \
3342 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3343 }
3344
3345 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3346 expand_java_return (OPERAND_TYPE##_type_node)
3347
3348 #define REM_EXPR TRUNC_MOD_EXPR
3349 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3350 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3351
3352 #define FIELD(IS_STATIC, IS_PUT) \
3353 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3354
3355 #define TEST(OPERAND_TYPE, CONDITION) \
3356 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3357
3358 #define COND(OPERAND_TYPE, CONDITION) \
3359 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3360
3361 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3362 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3363
3364 #define BRANCH_GOTO(OPERAND_VALUE) \
3365 expand_java_goto (oldpc + OPERAND_VALUE)
3366
3367 #define BRANCH_CALL(OPERAND_VALUE) \
3368 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3369
3370 #if 0
3371 #define BRANCH_RETURN(OPERAND_VALUE) \
3372 { \
3373 tree type = OPERAND_TYPE##_type_node; \
3374 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3375 expand_java_ret (value); \
3376 }
3377 #endif
3378
3379 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3380 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3381 fprintf (stderr, "(not implemented)\n")
3382 #define NOT_IMPL1(OPERAND_VALUE) \
3383 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3384 fprintf (stderr, "(not implemented)\n")
3385
3386 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3387
3388 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3389
3390 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3391
3392 #define STACK_SWAP(COUNT) java_stack_swap()
3393
3394 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3395 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3396 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3397
3398 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3399 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3400
3401 #define LOOKUP_SWITCH \
3402 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3403 tree selector = pop_value (INT_type_node); \
3404 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3405 while (--npairs >= 0) \
3406 { \
3407 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3408 expand_java_add_case (switch_expr, match, oldpc + offset); \
3409 } \
3410 }
3411
3412 #define TABLE_SWITCH \
3413 { jint default_offset = IMMEDIATE_s4; \
3414 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3415 tree selector = pop_value (INT_type_node); \
3416 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3417 for (; low <= high; low++) \
3418 { \
3419 jint offset = IMMEDIATE_s4; \
3420 expand_java_add_case (switch_expr, low, oldpc + offset); \
3421 } \
3422 }
3423
3424 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3425 { int opcode = byte_ops[PC-1]; \
3426 int method_ref_index = IMMEDIATE_u2; \
3427 int nargs; \
3428 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3429 else nargs = -1; \
3430 expand_invoke (opcode, method_ref_index, nargs); \
3431 }
3432
3433 /* Handle new, checkcast, instanceof */
3434 #define OBJECT(TYPE, OP) \
3435 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3436
3437 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3438
3439 #define ARRAY_LOAD(OPERAND_TYPE) \
3440 { \
3441 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3442 }
3443
3444 #define ARRAY_STORE(OPERAND_TYPE) \
3445 { \
3446 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3447 }
3448
3449 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3450 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3451 #define ARRAY_NEW_PTR() \
3452 push_value (build_anewarray (get_class_constant (current_jcf, \
3453 IMMEDIATE_u2), \
3454 pop_value (int_type_node)));
3455 #define ARRAY_NEW_NUM() \
3456 { \
3457 int atype = IMMEDIATE_u1; \
3458 push_value (build_newarray (atype, pop_value (int_type_node)));\
3459 }
3460 #define ARRAY_NEW_MULTI() \
3461 { \
3462 tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3463 int ndims = IMMEDIATE_u1; \
3464 expand_java_multianewarray( klass, ndims ); \
3465 }
3466
3467 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3468 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3469 pop_value (OPERAND_TYPE##_type_node)));
3470
3471 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3472 { \
3473 push_value (build1 (NOP_EXPR, int_type_node, \
3474 (convert (TO_TYPE##_type_node, \
3475 pop_value (FROM_TYPE##_type_node))))); \
3476 }
3477
3478 #define CONVERT(FROM_TYPE, TO_TYPE) \
3479 { \
3480 push_value (convert (TO_TYPE##_type_node, \
3481 pop_value (FROM_TYPE##_type_node))); \
3482 }
3483
3484 /* internal macro added for use by the WIDE case
3485 Added TREE_TYPE (decl) assignment, apbianco */
3486 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3487 { \
3488 tree decl, value; \
3489 int index = OPVALUE; \
3490 tree type = OPTYPE; \
3491 value = pop_value (type); \
3492 type = TREE_TYPE (value); \
3493 decl = find_local_variable (index, type, oldpc); \
3494 set_local_type (index, type); \
3495 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3496 }
3497
3498 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3499 { \
3500 /* have to do this since OPERAND_VALUE may have side-effects */ \
3501 int opvalue = OPERAND_VALUE; \
3502 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3503 }
3504
3505 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3506 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3507
3508 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3509 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3510
3511 #define MONITOR_OPERATION(call) \
3512 { \
3513 tree o = pop_value (ptr_type_node); \
3514 tree c; \
3515 flush_quick_stack (); \
3516 c = build_java_monitor (call, o); \
3517 TREE_SIDE_EFFECTS (c) = 1; \
3518 java_add_stmt (c); \
3519 }
3520
3521 #define SPECIAL_IINC(IGNORED) \
3522 { \
3523 unsigned int local_var_index = IMMEDIATE_u1; \
3524 int ival = IMMEDIATE_s1; \
3525 expand_iinc(local_var_index, ival, oldpc); \
3526 }
3527
3528 #define SPECIAL_WIDE(IGNORED) \
3529 { \
3530 int modified_opcode = IMMEDIATE_u1; \
3531 unsigned int local_var_index = IMMEDIATE_u2; \
3532 switch (modified_opcode) \
3533 { \
3534 case OPCODE_iinc: \
3535 { \
3536 int ival = IMMEDIATE_s2; \
3537 expand_iinc (local_var_index, ival, oldpc); \
3538 break; \
3539 } \
3540 case OPCODE_iload: \
3541 case OPCODE_lload: \
3542 case OPCODE_fload: \
3543 case OPCODE_dload: \
3544 case OPCODE_aload: \
3545 { \
3546 /* duplicate code from LOAD macro */ \
3547 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3548 break; \
3549 } \
3550 case OPCODE_istore: \
3551 case OPCODE_lstore: \
3552 case OPCODE_fstore: \
3553 case OPCODE_dstore: \
3554 case OPCODE_astore: \
3555 { \
3556 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3557 break; \
3558 } \
3559 default: \
3560 error ("unrecogized wide sub-instruction"); \
3561 } \
3562 }
3563
3564 #define SPECIAL_THROW(IGNORED) \
3565 build_java_athrow (pop_value (throwable_type_node))
3566
3567 #define SPECIAL_BREAK NOT_IMPL1
3568 #define IMPL NOT_IMPL
3569
3570 #include "javaop.def"
3571 #undef JAVAOP
3572 default:
3573 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3574 }
3575 return PC;
3576 }
3577
3578 /* Return the opcode at PC in the code section pointed to by
3579 CODE_OFFSET. */
3580
3581 static unsigned char
3582 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3583 {
3584 unsigned char opcode;
3585 long absolute_offset = (long)JCF_TELL (jcf);
3586
3587 JCF_SEEK (jcf, code_offset);
3588 opcode = jcf->read_ptr [pc];
3589 JCF_SEEK (jcf, absolute_offset);
3590 return opcode;
3591 }
3592
3593 /* Some bytecode compilers are emitting accurate LocalVariableTable
3594 attributes. Here's an example:
3595
3596 PC <t>store_<n>
3597 PC+1 ...
3598
3599 Attribute "LocalVariableTable"
3600 slot #<n>: ... (PC: PC+1 length: L)
3601
3602 This is accurate because the local in slot <n> really exists after
3603 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3604
3605 This procedure recognizes this situation and extends the live range
3606 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3607 length of the store instruction.)
3608
3609 This function is used by `give_name_to_locals' so that a local's
3610 DECL features a DECL_LOCAL_START_PC such that the first related
3611 store operation will use DECL as a destination, not an unrelated
3612 temporary created for the occasion.
3613
3614 This function uses a global (instruction_bits) `note_instructions' should
3615 have allocated and filled properly. */
3616
int
maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
		       int start_pc, int slot)
{
  /* first: base opcode of a matched <t>store_<n> family, or -1.
     index: operand of a matched explicit <t>store, or -1.  */
  int first, index, opcode;
  int pc, insn_pc;
  int wide_found = 0;

  if (!start_pc)
    return start_pc;

  first = index = -1;

  /* Find last previous instruction and remember it */
  for (pc = start_pc-1; pc; pc--)
    if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
      break;
  insn_pc = pc;

  /* Retrieve the instruction, handle `wide'. */
  opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
  if (opcode == OPCODE_wide)
    {
      wide_found = 1;
      opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
    }

  switch (opcode)
    {
    case OPCODE_astore_0:
    case OPCODE_astore_1:
    case OPCODE_astore_2:
    case OPCODE_astore_3:
      first = OPCODE_astore_0;
      break;

    case OPCODE_istore_0:
    case OPCODE_istore_1:
    case OPCODE_istore_2:
    case OPCODE_istore_3:
      first = OPCODE_istore_0;
      break;

    case OPCODE_lstore_0:
    case OPCODE_lstore_1:
    case OPCODE_lstore_2:
    case OPCODE_lstore_3:
      first = OPCODE_lstore_0;
      break;

    case OPCODE_fstore_0:
    case OPCODE_fstore_1:
    case OPCODE_fstore_2:
    case OPCODE_fstore_3:
      first = OPCODE_fstore_0;
      break;

    case OPCODE_dstore_0:
    case OPCODE_dstore_1:
    case OPCODE_dstore_2:
    case OPCODE_dstore_3:
      first = OPCODE_dstore_0;
      break;

    case OPCODE_astore:
    case OPCODE_istore:
    case OPCODE_lstore:
    case OPCODE_fstore:
    case OPCODE_dstore:
      index = peek_opcode_at_pc (jcf, code_offset, pc);
      if (wide_found)
	{
	  /* A wide store has a 16-bit index; combine the two bytes.  */
	  int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
	  index = (other << 8) + index;
	}
      break;
    }

  /* Now we decide: first >0 means we have a <t>store_<n>, index >0
     means we have a <t>store. */
  if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
    start_pc = insn_pc;

  return start_pc;
}
3702
3703 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3704 order, as specified by Java Language Specification.
3705
3706 The problem is that while expand_expr will evaluate its sub-operands in
3707 left-to-right order, for variables it will just return an rtx (i.e.
3708 an lvalue) for the variable (rather than an rvalue). So it is possible
3709 that a later sub-operand will change the register, and when the
3710 actual operation is done, it will use the new value, when it should
3711 have used the original value.
3712
3713 We fix this by using save_expr. This forces the sub-operand to be
   copied into a fresh virtual register.
3715
3716 For method invocation, we modify the arguments so that a
3717 left-to-right order evaluation is performed. Saved expressions
3718 will, in CALL_EXPR order, be reused when the call will be expanded.
3719
3720 We also promote outgoing args if needed. */
3721
tree
force_evaluation_order (tree node)
{
  if (flag_syntax_only)
    return node;
  if (TREE_CODE (node) == CALL_EXPR
      || (TREE_CODE (node) == COMPOUND_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
	  && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
    {
      tree call, cmp;
      int i, nargs;

      /* Account for wrapped around ctors.  */
      if (TREE_CODE (node) == COMPOUND_EXPR)
	call = TREE_OPERAND (node, 0);
      else
	call = node;

      nargs = call_expr_nargs (call);

      /* This reverses the evaluation order. This is a desired effect. */
      for (i = 0, cmp = NULL_TREE; i < nargs; i++)
	{
	  tree arg = CALL_EXPR_ARG (call, i);
	  /* Promote types smaller than integer.  This is required by
	     some ABIs.  */
	  tree type = TREE_TYPE (arg);
	  tree saved;
	  if (targetm.calls.promote_prototypes (type)
	      && INTEGRAL_TYPE_P (type)
	      && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
				      TYPE_SIZE (integer_type_node)))
	    arg = fold_convert (integer_type_node, arg);

	  /* Wrap the (recursively processed) argument in a SAVE_EXPR
	     and chain it behind the previous ones so the saves are
	     evaluated left-to-right before the call itself.  */
	  saved = save_expr (force_evaluation_order (arg));
	  cmp = (cmp == NULL_TREE ? saved :
		 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));

	  CALL_EXPR_ARG (call, i) = saved;
	}

      if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
	TREE_SIDE_EFFECTS (cmp) = 1;

      if (cmp)
	{
	  /* Prepend the chain of saves to NODE; the SAVE_EXPRs will be
	     reused when the call's arguments are expanded.  */
	  cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
	  if (TREE_TYPE (cmp) != void_type_node)
	    cmp = save_expr (cmp);
	  TREE_SIDE_EFFECTS (cmp) = 1;
	  node = cmp;
	}
    }
  return node;
}
3778
3779 /* Build a node to represent empty statements and blocks. */
3780
3781 tree
3782 build_java_empty_stmt (void)
3783 {
3784 tree t = build_empty_stmt ();
3785 return t;
3786 }
3787
3788 /* Promote all args of integral type before generating any code. */
3789
3790 static void
3791 promote_arguments (void)
3792 {
3793 int i;
3794 tree arg;
3795 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3796 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3797 {
3798 tree arg_type = TREE_TYPE (arg);
3799 if (INTEGRAL_TYPE_P (arg_type)
3800 && TYPE_PRECISION (arg_type) < 32)
3801 {
3802 tree copy = find_local_variable (i, integer_type_node, -1);
3803 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3804 copy,
3805 fold_convert (integer_type_node, arg)));
3806 }
3807 if (TYPE_IS_WIDE (arg_type))
3808 i++;
3809 }
3810 }
3811
3812 /* Create a local variable that points to the constant pool. */
3813
3814 static void
3815 cache_cpool_data_ref (void)
3816 {
3817 if (optimize)
3818 {
3819 tree cpool;
3820 tree d = build_constant_data_ref (flag_indirect_classes);
3821 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3822 build_pointer_type (TREE_TYPE (d)));
3823 java_add_local_var (cpool_ptr);
3824 TREE_CONSTANT (cpool_ptr) = 1;
3825
3826 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3827 cpool_ptr, build_address_of (d)));
3828 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3829 TREE_THIS_NOTRAP (cpool) = 1;
3830 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3831 }
3832 }
3833
3834 #include "gt-java-expr.h"