1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>.
20
21 Java and all Java-based marks are trademarks or registered trademarks
22 of Sun Microsystems, Inc. in the United States and other countries.
23 The Free Software Foundation is independent of Sun Microsystems, Inc. */
24
25 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "real.h"
33 #include "rtl.h"
34 #include "flags.h"
35 #include "expr.h"
36 #include "java-tree.h"
37 #include "javaop.h"
38 #include "java-opcodes.h"
39 #include "jcf.h"
40 #include "java-except.h"
41 #include "parse.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "ggc.h"
45 #include "tree-gimple.h"
46 #include "target.h"
47
48 static void flush_quick_stack (void);
49 static void push_value (tree);
50 static tree pop_value (tree);
51 static void java_stack_swap (void);
52 static void java_stack_dup (int, int);
53 static void build_java_athrow (tree);
54 static void build_java_jsr (int, int);
55 static void build_java_ret (tree);
56 static void expand_java_multianewarray (tree, int);
57 static void expand_java_arraystore (tree);
58 static void expand_java_arrayload (tree);
59 static void expand_java_array_length (void);
60 static tree build_java_monitor (tree, tree);
61 static void expand_java_pushc (int, tree);
62 static void expand_java_return (tree);
63 static void expand_load_internal (int, tree, int);
64 static void expand_java_NEW (tree);
65 static void expand_java_INSTANCEOF (tree);
66 static void expand_java_CHECKCAST (tree);
67 static void expand_iinc (unsigned int, int, int);
68 static void expand_java_binop (tree, enum tree_code);
69 static void note_label (int, int);
70 static void expand_compare (enum tree_code, tree, tree, int);
71 static void expand_test (enum tree_code, tree, int);
72 static void expand_cond (enum tree_code, tree, int);
73 static void expand_java_goto (int);
74 static tree expand_java_switch (tree, int);
75 static void expand_java_add_case (tree, int, int);
76 static tree pop_arguments (tree);
77 static void expand_invoke (int, int, int);
78 static void expand_java_field_op (int, int, int);
79 static void java_push_constant_from_pool (struct JCF *, int);
80 static void java_stack_pop (int);
81 static tree build_java_throw_out_of_bounds_exception (tree);
82 static tree build_java_check_indexed_type (tree, tree);
83 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
84 static void promote_arguments (void);
85 static void cache_cpool_data_ref (void);
86
87 static GTY(()) tree operand_type[59];
88
89 static GTY(()) tree methods_ident;
90 static GTY(()) tree ncode_ident;
91 tree dtable_ident = NULL_TREE;
92
93 /* Set to nonzero value in order to emit class initialization code
94 before static field references. */
95 int always_initialize_class_p = 0;
96
97 /* We store the stack state in two places:
98 Within a basic block, we use the quick_stack, which is a
99 pushdown list (TREE_LISTs) of expression nodes.
100 This is the top part of the stack; below that we use find_stack_slot.
101 At the end of a basic block, the quick_stack must be flushed
102 to the stack slot array (as handled by find_stack_slot).
103 Using quick_stack generates better code (especially when
104 compiled without optimization), because we do not have to
105 explicitly store and load trees to temporary variables.
106
107 If a variable is on the quick stack, it means the value of variable
108 when the quick stack was last flushed. Conceptually, flush_quick_stack
109 saves all the quick_stack elements in parallel. However, that is
110 complicated, so it actually saves them (i.e. copies each stack value
111 to its home virtual register) from low indexes upward. This allows a quick_stack
112 element at index i (counting from the bottom of the stack) to reference
113 slot virtual registers that are >= i, but not those that are deeper.
114 This convention makes most operations easier. For example iadd works
115 even when the stack contains (reg[0], reg[1]): It results in the
116 stack containing (reg[0]+reg[1]), which is OK. However, some stack
117 operations are more complicated. For example dup given a stack
118 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
119 the convention, since stack value 1 would refer to a register with
120 lower index (reg[0]), which flush_quick_stack does not safely handle.
121 So dup cannot just add an extra element to the quick_stack, but iadd can.
122 */
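/* An illustrative restatement of the convention above: if the quick_stack
   holds (reg[0], reg[1]), i.e. each element only names the stack-slot
   register at its own index or higher, then iadd can simply replace the two
   entries by the single expression (reg[0] + reg[1]) without flushing.
   A dup on a quick_stack of (reg[0]) would instead have to become
   (reg[0], reg[0]); the new top element (stack index 1) would name reg[0],
   a slot below its own index, so dup must copy through real stack slots
   rather than just growing the quick_stack.  */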
123
124 static GTY(()) tree quick_stack;
125
126 /* A free-list of unused permanent TREE_LIST nodes. */
127 static GTY((deletable)) tree tree_list_free_list;
128
129 /* The physical memory page size used in this computer. See
130 build_field_ref(). */
131 static GTY(()) tree page_size;
132
133 /* The stack pointer of the Java virtual machine.
134 This does include the size of the quick_stack. */
135
136 int stack_pointer;
137
138 const unsigned char *linenumber_table;
139 int linenumber_count;
140
141 /* Largest pc so far in this method that has been passed to lookup_label. */
142 int highest_label_pc_this_method = -1;
143
144 /* Base value for this method to add to pc to get generated label. */
145 int start_label_pc_this_method = 0;
146
147 void
148 init_expr_processing (void)
149 {
150 operand_type[21] = operand_type[54] = int_type_node;
151 operand_type[22] = operand_type[55] = long_type_node;
152 operand_type[23] = operand_type[56] = float_type_node;
153 operand_type[24] = operand_type[57] = double_type_node;
154 operand_type[25] = operand_type[58] = ptr_type_node;
155 }
156
157 tree
158 java_truthvalue_conversion (tree expr)
159 {
160 /* It is simpler and generates better code to have only TRUTH_*_EXPR
161 or comparison expressions as truth values at this level.
162
163 This function should normally be identity for Java. */
164
165 switch (TREE_CODE (expr))
166 {
167 case EQ_EXPR: case NE_EXPR: case UNEQ_EXPR: case LTGT_EXPR:
168 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
169 case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
170 case ORDERED_EXPR: case UNORDERED_EXPR:
171 case TRUTH_ANDIF_EXPR:
172 case TRUTH_ORIF_EXPR:
173 case TRUTH_AND_EXPR:
174 case TRUTH_OR_EXPR:
175 case TRUTH_XOR_EXPR:
176 case TRUTH_NOT_EXPR:
177 case ERROR_MARK:
178 return expr;
179
180 case INTEGER_CST:
181 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
182
183 case REAL_CST:
184 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
185
186 /* are these legal? XXX JH */
187 case NEGATE_EXPR:
188 case ABS_EXPR:
189 case FLOAT_EXPR:
190 /* These don't change whether an object is nonzero or zero. */
191 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
192
193 case COND_EXPR:
194 /* Distribute the conversion into the arms of a COND_EXPR. */
195 return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
196 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
197 java_truthvalue_conversion (TREE_OPERAND (expr, 2)));
198
199 case NOP_EXPR:
200 /* If this is widening the argument, we can ignore it. */
201 if (TYPE_PRECISION (TREE_TYPE (expr))
202 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
203 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
204 /* fall through to default */
205
206 default:
207 return fold_build2 (NE_EXPR, boolean_type_node,
208 expr, boolean_false_node);
209 }
210 }
211
212 /* Save any stack slots that happen to be in the quick_stack into their
213 home virtual register slots.
214
215 The copy order is from low stack index to high, to support the invariant
216 that the expression for a slot may contain decls for stack slots with
217 higher (or the same) index, but not lower. */
218
219 static void
220 flush_quick_stack (void)
221 {
222 int stack_index = stack_pointer;
223 tree prev, cur, next;
224
225 /* First reverse the quick_stack, and count the number of slots it has. */
226 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
227 {
228 next = TREE_CHAIN (cur);
229 TREE_CHAIN (cur) = prev;
230 prev = cur;
231 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
232 }
233 quick_stack = prev;
234
235 while (quick_stack != NULL_TREE)
236 {
237 tree decl;
238 tree node = quick_stack, type;
239 quick_stack = TREE_CHAIN (node);
240 TREE_CHAIN (node) = tree_list_free_list;
241 tree_list_free_list = node;
242 node = TREE_VALUE (node);
243 type = TREE_TYPE (node);
244
245 decl = find_stack_slot (stack_index, type);
246 if (decl != node)
247 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (node), decl, node));
248 stack_index += 1 + TYPE_IS_WIDE (type);
249 }
250 }
251
252 /* Push TYPE on the type stack.
253 Return 1 on success, 0 on overflow. */
254
255 int
256 push_type_0 (tree type)
257 {
258 int n_words;
259 type = promote_type (type);
260 n_words = 1 + TYPE_IS_WIDE (type);
261 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
262 return 0;
263 /* Allocate decl for this variable now, so we get a temporary that
264 survives the whole method. */
265 find_stack_slot (stack_pointer, type);
266 stack_type_map[stack_pointer++] = type;
267 n_words--;
268 while (--n_words >= 0)
269 stack_type_map[stack_pointer++] = TYPE_SECOND;
270 return 1;
271 }
272
273 void
274 push_type (tree type)
275 {
276 int r = push_type_0 (type);
277 gcc_assert (r);
278 }
279
280 static void
281 push_value (tree value)
282 {
283 tree type = TREE_TYPE (value);
284 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
285 {
286 type = promote_type (type);
287 value = convert (type, value);
288 }
289 push_type (type);
290 if (tree_list_free_list == NULL_TREE)
291 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
292 else
293 {
294 tree node = tree_list_free_list;
295 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
296 TREE_VALUE (node) = value;
297 TREE_CHAIN (node) = quick_stack;
298 quick_stack = node;
299 }
300 /* If the value has a side effect, then we need to evaluate it
301 whether or not the result is used. If the value ends up on the
302 quick stack and is then popped, this won't happen -- so we flush
303 the quick stack. It is safest to simply always flush, though,
304 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
305 the latter we may need to strip conversions. */
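  /* Concretely: for a bytecode pair like

         invokevirtual Foo.bar()I    (hypothetical method)
         pop

     the call expression would land on the quick stack and then be thrown
     away by pop_value, so its side effects would be lost if we did not
     flush it into a real stack slot first.  */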
306 flush_quick_stack ();
307 }
308
309 /* Pop a type from the type stack.
310 TYPE is the expected type. Return the actual type, which must be
311 convertible to TYPE.
312 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
313
314 tree
315 pop_type_0 (tree type, char **messagep)
316 {
317 int n_words;
318 tree t;
319 *messagep = NULL;
320 if (TREE_CODE (type) == RECORD_TYPE)
321 type = promote_type (type);
322 n_words = 1 + TYPE_IS_WIDE (type);
323 if (stack_pointer < n_words)
324 {
325 *messagep = xstrdup ("stack underflow");
326 return type;
327 }
328 while (--n_words > 0)
329 {
330 if (stack_type_map[--stack_pointer] != void_type_node)
331 {
332 *messagep = xstrdup ("Invalid multi-word value on type stack");
333 return type;
334 }
335 }
336 t = stack_type_map[--stack_pointer];
337 if (type == NULL_TREE || t == type)
338 return t;
339 if (TREE_CODE (t) == TREE_LIST)
340 {
341 do
342 {
343 tree tt = TREE_PURPOSE (t);
344 if (! can_widen_reference_to (tt, type))
345 {
346 t = tt;
347 goto fail;
348 }
349 t = TREE_CHAIN (t);
350 }
351 while (t);
352 return t;
353 }
354 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
355 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
356 return t;
357 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
358 {
359 /* If the expected type we've been passed is object or ptr
360 (i.e. void*), the caller needs to know the real type. */
361 if (type == ptr_type_node || type == object_ptr_type_node)
362 return t;
363
364 /* Since the verifier has already run, we know that any
365 types we see will be compatible. In BC mode, this fact
366 may be checked at runtime, but if that is so then we can
367 assume its truth here as well. So, we always succeed
368 here, with the expected type. */
369 return type;
370 }
371
372 if (! flag_verify_invocations && flag_indirect_dispatch
373 && t == object_ptr_type_node)
374 {
375 if (type != ptr_type_node)
376 warning (0, "need to insert runtime check for %s",
377 xstrdup (lang_printable_name (type, 0)));
378 return type;
379 }
380
381 /* lang_printable_name uses a static buffer, so we must save the result
382 from calling it the first time. */
383 fail:
384 {
385 char *temp = xstrdup (lang_printable_name (type, 0));
386 /* If the stack contains a multi-word type, keep popping the stack until
387 the real type is found. */
388 while (t == void_type_node)
389 t = stack_type_map[--stack_pointer];
390 *messagep = concat ("expected type '", temp,
391 "' but stack contains '", lang_printable_name (t, 0),
392 "'", NULL);
393 free (temp);
394 }
395 return type;
396 }
397
398 /* Pop a type from the type stack.
399 TYPE is the expected type. Return the actual type, which must be
400 convertible to TYPE, otherwise call error. */
401
402 tree
403 pop_type (tree type)
404 {
405 char *message = NULL;
406 type = pop_type_0 (type, &message);
407 if (message != NULL)
408 {
409 error ("%s", message);
410 free (message);
411 }
412 return type;
413 }
414
415 \f
416 /* Return true if two type assertions are equal. */
417
418 static int
419 type_assertion_eq (const void * k1_p, const void * k2_p)
420 {
421 const type_assertion k1 = *(const type_assertion *)k1_p;
422 const type_assertion k2 = *(const type_assertion *)k2_p;
423 return (k1.assertion_code == k2.assertion_code
424 && k1.op1 == k2.op1
425 && k1.op2 == k2.op2);
426 }
427
428 /* Hash a type assertion. */
429
430 static hashval_t
431 type_assertion_hash (const void *p)
432 {
433 const type_assertion *k_p = p;
434 hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
435 k_p->assertion_code, 0);
436 hash = iterative_hash (&k_p->op1, sizeof k_p->op1, hash);
437 return iterative_hash (&k_p->op2, sizeof k_p->op2, hash);
438 }
439
440 /* Add an entry to the type assertion table for the given class.
441 CLASS is the class for which this assertion will be evaluated by the
442 runtime during loading/initialization.
443 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
444 OP1 and OP2 are the operands. The tree type of these arguments may be
445 specific to each assertion_code. */
446
447 void
448 add_type_assertion (tree class, int assertion_code, tree op1, tree op2)
449 {
450 htab_t assertions_htab;
451 type_assertion as;
452 void **as_pp;
453
454 assertions_htab = TYPE_ASSERTIONS (class);
455 if (assertions_htab == NULL)
456 {
457 assertions_htab = htab_create_ggc (7, type_assertion_hash,
458 type_assertion_eq, NULL);
459 TYPE_ASSERTIONS (class) = assertions_htab;
460 }
461
462 as.assertion_code = assertion_code;
463 as.op1 = op1;
464 as.op2 = op2;
465
466 as_pp = htab_find_slot (assertions_htab, &as, INSERT);
467
468 /* Don't add the same assertion twice. */
469 if (*as_pp)
470 return;
471
472 *as_pp = ggc_alloc (sizeof (type_assertion));
473 **(type_assertion **)as_pp = as;
474 }
475
476 \f
477 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
478 Handles array types and interfaces. */
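/* A few informal examples of the widening rules implemented here: a
   String[] reference widens to Object[] (the element types widen) and to
   Object, and any array type widens to Cloneable or java.io.Serializable;
   an int[] does not widen to Object[] (primitive vs. reference elements);
   and for plain classes SOURCE_TYPE must be TARGET_TYPE or one of its
   subclasses, or implement it when TARGET_TYPE is an interface.  */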
479
480 int
481 can_widen_reference_to (tree source_type, tree target_type)
482 {
483 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
484 return 1;
485
486 /* Get rid of pointers */
487 if (TREE_CODE (source_type) == POINTER_TYPE)
488 source_type = TREE_TYPE (source_type);
489 if (TREE_CODE (target_type) == POINTER_TYPE)
490 target_type = TREE_TYPE (target_type);
491
492 if (source_type == target_type)
493 return 1;
494
495 /* FIXME: This is very pessimistic, in that it checks everything,
496 even if we already know that the types are compatible. If we're
497 to support full Java class loader semantics, we need this.
498 However, we could do something more optimal. */
499 if (! flag_verify_invocations)
500 {
501 add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
502 source_type, target_type);
503
504 if (!quiet_flag)
505 warning (0, "assert: %s is assign compatible with %s",
506 xstrdup (lang_printable_name (target_type, 0)),
507 xstrdup (lang_printable_name (source_type, 0)));
508 /* Punt everything to runtime. */
509 return 1;
510 }
511
512 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
513 {
514 return 1;
515 }
516 else
517 {
518 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
519 {
520 HOST_WIDE_INT source_length, target_length;
521 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
522 {
523 /* An array implements Cloneable and Serializable. */
524 tree name = DECL_NAME (TYPE_NAME (target_type));
525 return (name == java_lang_cloneable_identifier_node
526 || name == java_io_serializable_identifier_node);
527 }
528 target_length = java_array_type_length (target_type);
529 if (target_length >= 0)
530 {
531 source_length = java_array_type_length (source_type);
532 if (source_length != target_length)
533 return 0;
534 }
535 source_type = TYPE_ARRAY_ELEMENT (source_type);
536 target_type = TYPE_ARRAY_ELEMENT (target_type);
537 if (source_type == target_type)
538 return 1;
539 if (TREE_CODE (source_type) != POINTER_TYPE
540 || TREE_CODE (target_type) != POINTER_TYPE)
541 return 0;
542 return can_widen_reference_to (source_type, target_type);
543 }
544 else
545 {
546 int source_depth = class_depth (source_type);
547 int target_depth = class_depth (target_type);
548
549 if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
550 {
551 if (! quiet_flag)
552 warning (0, "assert: %s is assign compatible with %s",
553 xstrdup (lang_printable_name (target_type, 0)),
554 xstrdup (lang_printable_name (source_type, 0)));
555 return 1;
556 }
557
558 /* class_depth can return a negative depth if an error occurred */
559 if (source_depth < 0 || target_depth < 0)
560 return 0;
561
562 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
563 {
564 /* target_type is OK if source_type or source_type ancestors
565 implement target_type. We handle multiple sub-interfaces */
566 tree binfo, base_binfo;
567 int i;
568
569 for (binfo = TYPE_BINFO (source_type), i = 0;
570 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
571 if (can_widen_reference_to
572 (BINFO_TYPE (base_binfo), target_type))
573 return 1;
574
575 if (!i)
576 return 0;
577 }
578
579 for ( ; source_depth > target_depth; source_depth--)
580 {
581 source_type
582 = BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
583 }
584 return source_type == target_type;
585 }
586 }
587 }
588
589 static tree
590 pop_value (tree type)
591 {
592 type = pop_type (type);
593 if (quick_stack)
594 {
595 tree node = quick_stack;
596 quick_stack = TREE_CHAIN (quick_stack);
597 TREE_CHAIN (node) = tree_list_free_list;
598 tree_list_free_list = node;
599 node = TREE_VALUE (node);
600 return node;
601 }
602 else
603 return find_stack_slot (stack_pointer, promote_type (type));
604 }
605
606
607 /* Pop and discard the top COUNT stack slots. */
608
609 static void
610 java_stack_pop (int count)
611 {
612 while (count > 0)
613 {
614 tree type, val;
615
616 gcc_assert (stack_pointer != 0);
617
618 type = stack_type_map[stack_pointer - 1];
619 if (type == TYPE_SECOND)
620 {
621 count--;
622 gcc_assert (stack_pointer != 1 && count > 0);
623
624 type = stack_type_map[stack_pointer - 2];
625 }
626 val = pop_value (type);
627 count--;
628 }
629 }
630
631 /* Implement the 'swap' operator (to swap two top stack slots). */
632
633 static void
634 java_stack_swap (void)
635 {
636 tree type1, type2;
637 tree temp;
638 tree decl1, decl2;
639
640 if (stack_pointer < 2
641 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
642 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
643 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
644 /* Bad stack swap. */
645 abort ();
647
648 flush_quick_stack ();
649 decl1 = find_stack_slot (stack_pointer - 1, type1);
650 decl2 = find_stack_slot (stack_pointer - 2, type2);
651 temp = build_decl (VAR_DECL, NULL_TREE, type1);
652 java_add_local_var (temp);
653 java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
654 java_add_stmt (build2 (MODIFY_EXPR, type2,
655 find_stack_slot (stack_pointer - 1, type2),
656 decl2));
657 java_add_stmt (build2 (MODIFY_EXPR, type1,
658 find_stack_slot (stack_pointer - 2, type1),
659 temp));
660 stack_type_map[stack_pointer - 1] = type2;
661 stack_type_map[stack_pointer - 2] = type1;
662 }
663
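/* Implement the dup* family of operators.  SIZE is the number of stack
   words being duplicated and OFFSET is how many extra words down the
   copies are inserted; for example, the plain JVM `dup' presumably maps
   to (SIZE, OFFSET) == (1, 0), `dup_x1' to (1, 1), `dup_x2' to (1, 2),
   and `dup2' to (2, 0).  */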
664 static void
665 java_stack_dup (int size, int offset)
666 {
667 int low_index = stack_pointer - size - offset;
668 int dst_index;
669 if (low_index < 0)
670 error ("stack underflow - dup* operation");
671
672 flush_quick_stack ();
673
674 stack_pointer += size;
675 dst_index = stack_pointer;
676
677 for (dst_index = stack_pointer; --dst_index >= low_index; )
678 {
679 tree type;
680 int src_index = dst_index - size;
681 if (src_index < low_index)
682 src_index = dst_index + size + offset;
683 type = stack_type_map [src_index];
684 if (type == TYPE_SECOND)
685 {
686 /* Dup operation splits 64-bit number. */
687 gcc_assert (src_index > low_index);
688
689 stack_type_map[dst_index] = type;
690 src_index--; dst_index--;
691 type = stack_type_map[src_index];
692 gcc_assert (TYPE_IS_WIDE (type));
693 }
694 else
695 gcc_assert (! TYPE_IS_WIDE (type));
696
697 if (src_index != dst_index)
698 {
699 tree src_decl = find_stack_slot (src_index, type);
700 tree dst_decl = find_stack_slot (dst_index, type);
701
702 java_add_stmt
703 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
704 stack_type_map[dst_index] = type;
705 }
706 }
707 }
708
709 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
710 value stack. */
711
712 static void
713 build_java_athrow (tree node)
714 {
715 tree call;
716
717 call = build_call_nary (void_type_node,
718 build_address_of (throw_node),
719 1, node);
720 TREE_SIDE_EFFECTS (call) = 1;
721 java_add_stmt (call);
722 java_stack_pop (stack_pointer);
723 }
724
725 /* Implementation for jsr/ret */
726
727 static void
728 build_java_jsr (int target_pc, int return_pc)
729 {
730 tree where = lookup_label (target_pc);
731 tree ret = lookup_label (return_pc);
732 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
733 push_value (ret_label);
734 flush_quick_stack ();
735 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
736
737 /* Do not need to emit the label here. We noted the existence of the
738 label as a jump target in note_instructions; we'll emit the label
739 for real at the beginning of the expand_byte_code loop. */
740 }
741
742 static void
743 build_java_ret (tree location)
744 {
745 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
746 }
747
748 /* Implementation of operations on array: new, load, store, length */
749
750 tree
751 decode_newarray_type (int atype)
752 {
753 switch (atype)
754 {
755 case 4: return boolean_type_node;
756 case 5: return char_type_node;
757 case 6: return float_type_node;
758 case 7: return double_type_node;
759 case 8: return byte_type_node;
760 case 9: return short_type_node;
761 case 10: return int_type_node;
762 case 11: return long_type_node;
763 default: return NULL_TREE;
764 }
765 }
766
767 /* Map primitive type to the code used by OPCODE_newarray. */
768
769 int
770 encode_newarray_type (tree type)
771 {
772 if (type == boolean_type_node)
773 return 4;
774 else if (type == char_type_node)
775 return 5;
776 else if (type == float_type_node)
777 return 6;
778 else if (type == double_type_node)
779 return 7;
780 else if (type == byte_type_node)
781 return 8;
782 else if (type == short_type_node)
783 return 9;
784 else if (type == int_type_node)
785 return 10;
786 else if (type == long_type_node)
787 return 11;
788 else
789 gcc_unreachable ();
790 }
791
792 /* Build a call to _Jv_ThrowBadArrayIndex(), the
793 ArrayIndexOutOfBoundsException exception handler. */
794
795 static tree
796 build_java_throw_out_of_bounds_exception (tree index)
797 {
798 tree node = build_call_nary (int_type_node,
799 build_address_of (soft_badarrayindex_node),
800 1, index);
801 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
802 return (node);
803 }
804
805 /* Return the length of an array. Doesn't perform any checking on the nature
806 or value of the array NODE. May be used to implement some bytecodes. */
807
808 tree
809 build_java_array_length_access (tree node)
810 {
811 tree type = TREE_TYPE (node);
812 tree array_type = TREE_TYPE (type);
813 HOST_WIDE_INT length;
814
815 if (!is_array_type_p (type))
816 {
817 /* With the new verifier, we will see an ordinary pointer type
818 here. In this case, we just use an arbitrary array type. */
819 array_type = build_java_array_type (object_ptr_type_node, -1);
820 type = promote_type (array_type);
821 }
822
823 length = java_array_type_length (type);
824 if (length >= 0)
825 return build_int_cst (NULL_TREE, length);
826
827 node = build3 (COMPONENT_REF, int_type_node,
828 build_java_indirect_ref (array_type, node,
829 flag_check_references),
830 lookup_field (&array_type, get_identifier ("length")),
831 NULL_TREE);
832 IS_ARRAY_LENGTH_ACCESS (node) = 1;
833 return node;
834 }
835
836 /* Optionally checks a reference against the NULL pointer. ARG1 is the
837 reference expr; ARG2 says whether we should check it. Don't generate extra
838 checks if we're not generating code. */
839
840 tree
841 java_check_reference (tree expr, int check)
842 {
843 if (!flag_syntax_only && check)
844 {
845 expr = save_expr (expr);
846 expr = build3 (COND_EXPR, TREE_TYPE (expr),
847 build2 (EQ_EXPR, boolean_type_node,
848 expr, null_pointer_node),
849 build_call_nary (void_type_node,
850 build_address_of (soft_nullpointer_node),
851 0),
852 expr);
853 }
854
855 return expr;
856 }
857
858 /* Reference an object: just like an INDIRECT_REF, but with checking. */
859
860 tree
861 build_java_indirect_ref (tree type, tree expr, int check)
862 {
863 tree t;
864 t = java_check_reference (expr, check);
865 t = convert (build_pointer_type (type), t);
866 return build1 (INDIRECT_REF, type, t);
867 }
868
869 /* Implement array indexing (either as l-value or r-value).
870 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
871 Optionally performs bounds checking and/or a test against NULL.
872 At this point, ARRAY should have been verified as an array. */
873
874 tree
875 build_java_arrayaccess (tree array, tree type, tree index)
876 {
877 tree node, throw = NULL_TREE;
878 tree data_field;
879 tree ref;
880 tree array_type = TREE_TYPE (TREE_TYPE (array));
881 tree size_exp = fold_convert (sizetype, size_in_bytes (type));
882
883 if (!is_array_type_p (TREE_TYPE (array)))
884 {
885 /* With the new verifier, we will see an ordinary pointer type
886 here. In this case, we just use the correct array type. */
887 array_type = build_java_array_type (type, -1);
888 }
889
890 if (flag_bounds_check)
891 {
892 /* Generate:
893 * (unsigned jint) INDEX >= (unsigned jint) LEN
894 * && throw ArrayIndexOutOfBoundsException.
895 * Note this is equivalent to and more efficient than:
896 * INDEX < 0 || INDEX >= LEN && throw ... */
897 tree test;
898 tree len = convert (unsigned_int_type_node,
899 build_java_array_length_access (array));
900 test = fold_build2 (GE_EXPR, boolean_type_node,
901 convert (unsigned_int_type_node, index),
902 len);
903 if (! integer_zerop (test))
904 {
905 throw = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
906 build_java_throw_out_of_bounds_exception (index));
907 /* allows expansion within COMPOUND */
908 TREE_SIDE_EFFECTS( throw ) = 1;
909 }
910 }
911
912 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
913 to have the bounds check evaluated first. */
914 if (throw != NULL_TREE)
915 index = build2 (COMPOUND_EXPR, int_type_node, throw, index);
916
917 data_field = lookup_field (&array_type, get_identifier ("data"));
918
919 ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),
920 build_java_indirect_ref (array_type, array,
921 flag_check_references),
922 data_field, NULL_TREE);
923
924 /* Take the address of the data field and convert it to a pointer to
925 the element type. */
926 node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));
927
928 /* Multiply the index by the size of an element to obtain a byte
929 offset. Convert the result to a pointer to the element type. */
930 index = build2 (MULT_EXPR, sizetype,
931 fold_convert (sizetype, index),
932 size_exp);
933
934 /* Sum the byte offset and the address of the data field. */
935 node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
936
937 /* Finally, return
938
939 *((&array->data) + index*size_exp)
940
941 */
942 return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
943 }
944
945 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
946 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can
947 determine that no check is required. */
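/* For example, in an assignment like

       a[i] = a[j];

   where the right-hand side was just loaded from the same array, the
   stored value necessarily already has the array's element type, so the
   code below elides the _Jv_CheckArrayStore call for that case.  */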
948
949 tree
950 build_java_arraystore_check (tree array, tree object)
951 {
952 tree check, element_type, source;
953 tree array_type_p = TREE_TYPE (array);
954 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
955
956 if (! flag_verify_invocations)
957 {
958 /* With the new verifier, we don't track precise types. FIXME:
959 performance regression here. */
960 element_type = TYPE_NAME (object_type_node);
961 }
962 else
963 {
964 gcc_assert (is_array_type_p (array_type_p));
965
966 /* Get the TYPE_DECL for ARRAY's element type. */
967 element_type
968 = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
969 }
970
971 gcc_assert (TREE_CODE (element_type) == TYPE_DECL
972 && TREE_CODE (object_type) == TYPE_DECL);
973
974 if (!flag_store_check)
975 return build1 (NOP_EXPR, array_type_p, array);
976
977 /* No check is needed if the element type is final. Also check that
978 element_type matches object_type, since in the bytecode
979 compilation case element_type may be the actual element type of
980 the array rather than its declared type. However, if we're doing
981 indirect dispatch, we can't do the `final' optimization. */
982 if (element_type == object_type
983 && ! flag_indirect_dispatch
984 && CLASS_FINAL (element_type))
985 return build1 (NOP_EXPR, array_type_p, array);
986
987 /* OBJECT might be wrapped by a SAVE_EXPR. */
988 if (TREE_CODE (object) == SAVE_EXPR)
989 source = TREE_OPERAND (object, 0);
990 else
991 source = object;
992
993 /* Avoid the check if OBJECT was just loaded from the same array. */
994 if (TREE_CODE (source) == ARRAY_REF)
995 {
996 tree target;
997 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
998 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
999 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
1000 if (TREE_CODE (source) == SAVE_EXPR)
1001 source = TREE_OPERAND (source, 0);
1002
1003 target = array;
1004 if (TREE_CODE (target) == SAVE_EXPR)
1005 target = TREE_OPERAND (target, 0);
1006
1007 if (source == target)
1008 return build1 (NOP_EXPR, array_type_p, array);
1009 }
1010
1011 /* Build an invocation of _Jv_CheckArrayStore */
1012 check = build_call_nary (void_type_node,
1013 build_address_of (soft_checkarraystore_node),
1014 2, array, object);
1015 TREE_SIDE_EFFECTS (check) = 1;
1016
1017 return check;
1018 }
1019
1020 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
1021 ARRAY_NODE. This function is used to retrieve something less vague than
1022 a pointer type when indexing the first dimension of something like [[<t>.
1023 May return a corrected type, if necessary; otherwise INDEXED_TYPE is
1024 returned unchanged. */
1025
1026 static tree
1027 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1028 tree indexed_type)
1029 {
1030 /* We used to check to see if ARRAY_NODE really had array type.
1031 However, with the new verifier, this is not necessary, as we know
1032 that the object will be an array of the appropriate type. */
1033
1034 return indexed_type;
1035 }
1036
1037 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1038 called with an integer code (the type of array to create), and the length
1039 of the array to create. */
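/* Informally, for a source expression such as `new int[n]' this builds a
   call of roughly the shape _Jv_NewPrimArray (<int's class object>, n),
   where the first argument is the primitive class reference produced by
   build_class_ref below.  */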
1040
1041 tree
1042 build_newarray (int atype_value, tree length)
1043 {
1044 tree type_arg;
1045
1046 tree prim_type = decode_newarray_type (atype_value);
1047 tree type
1048 = build_java_array_type (prim_type,
1049 host_integerp (length, 0)
1050 ? tree_low_cst (length, 0) : -1);
1051
1052 /* Pass a reference to the primitive type class and save the runtime
1053 some work. */
1054 type_arg = build_class_ref (prim_type);
1055
1056 return build_call_nary (promote_type (type),
1057 build_address_of (soft_newarray_node),
1058 2, type_arg, length);
1059 }
1060
1061 /* Generates anewarray from a given CLASS_TYPE. LENGTH is the size
1062 of the dimension. */
1063
1064 tree
1065 build_anewarray (tree class_type, tree length)
1066 {
1067 tree type
1068 = build_java_array_type (class_type,
1069 host_integerp (length, 0)
1070 ? tree_low_cst (length, 0) : -1);
1071
1072 return build_call_nary (promote_type (type),
1073 build_address_of (soft_anewarray_node),
1074 3,
1075 length,
1076 build_class_ref (class_type),
1077 null_pointer_node);
1078 }
1079
1080 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
1081
1082 tree
1083 build_new_array (tree type, tree length)
1084 {
1085 if (JPRIMITIVE_TYPE_P (type))
1086 return build_newarray (encode_newarray_type (type), length);
1087 else
1088 return build_anewarray (TREE_TYPE (type), length);
1089 }
1090
1091 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1092 class pointer, the number of dimensions, and the matching number of
1093 dimension sizes. The argument list is NULL terminated. */
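/* For example, for `new String[2][3]' the loop below builds an argument
   list corresponding to

       _Jv_NewMultiArray (class, 2, 2, 3, NULL)

   i.e. the class pointer, the dimension count, the dimension sizes in
   source order, and the terminating NULL.  */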
1094
1095 static void
1096 expand_java_multianewarray (tree class_type, int ndim)
1097 {
1098 int i;
1099 tree args = build_tree_list( NULL_TREE, null_pointer_node );
1100
1101 for( i = 0; i < ndim; i++ )
1102 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
1103
1104 args = tree_cons (NULL_TREE,
1105 build_class_ref (class_type),
1106 tree_cons (NULL_TREE,
1107 build_int_cst (NULL_TREE, ndim),
1108 args));
1109
1110 push_value (build_call_list (promote_type (class_type),
1111 build_address_of (soft_multianewarray_node),
1112 args));
1113 }
1114
1115 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
1116 ARRAY is an array type. May expand some bounds checking and NULL
1117 pointer checking. RHS_TYPE_NODE is the type of the value we are going
1118 to store. In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off
1119 the stack is an INT; in those cases, we make the conversion.
1120
1121 If ARRAY is a reference type, the assignment is checked at run-time
1122 to make sure that the RHS can be assigned to the array element
1123 type. It is not necessary to generate this code if ARRAY is final. */
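/* Informally, for `a[i] = v' with a reference element type the statements
   emitted below amount to

       tmp = &a->data[i];             (bounds and null checks happen here)
       _Jv_CheckArrayStore (a, v);    (run-time assignability check)
       *tmp = v;

   so the index check is always evaluated before the store check.  */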
1124
1125 static void
1126 expand_java_arraystore (tree rhs_type_node)
1127 {
1128 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
1129 && TYPE_PRECISION (rhs_type_node) <= 32) ?
1130 int_type_node : rhs_type_node);
1131 tree index = pop_value (int_type_node);
1132 tree array_type, array, temp, access;
1133
1134 /* If we're processing an `aastore' we might as well just pick
1135 `Object'. */
1136 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1137 {
1138 array_type = build_java_array_type (object_ptr_type_node, -1);
1139 rhs_type_node = object_ptr_type_node;
1140 }
1141 else
1142 array_type = build_java_array_type (rhs_type_node, -1);
1143
1144 array = pop_value (array_type);
1145 array = build1 (NOP_EXPR, promote_type (array_type), array);
1146
1147 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
1148
1149 flush_quick_stack ();
1150
1151 index = save_expr (index);
1152 array = save_expr (array);
1153
1154 /* We want to perform the bounds check (done by
1155 build_java_arrayaccess) before the type check (done by
1156 build_java_arraystore_check). So, we call build_java_arrayaccess
1157 -- which returns an ARRAY_REF lvalue -- and we then generate code
1158 to stash the address of that lvalue in a temp. Then we call
1159 build_java_arraystore_check, and finally we generate a
1160 MODIFY_EXPR to set the array element. */
1161
1162 access = build_java_arrayaccess (array, rhs_type_node, index);
1163 temp = build_decl (VAR_DECL, NULL_TREE,
1164 build_pointer_type (TREE_TYPE (access)));
1165 java_add_local_var (temp);
1166 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
1167 temp,
1168 build_fold_addr_expr (access)));
1169
1170 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
1171 {
1172 tree check = build_java_arraystore_check (array, rhs_node);
1173 java_add_stmt (check);
1174 }
1175
1176 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
1177 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
1178 rhs_node));
1179 }
1180
1181 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1182 sure that LHS is an array type. May expand some bounds checking and NULL
1183 pointer checking.
1184 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1185 BOOLEAN/SHORT, we push a promoted type back to the stack.
1186 */
1187
1188 static void
1189 expand_java_arrayload (tree lhs_type_node)
1190 {
1191 tree load_node;
1192 tree index_node = pop_value (int_type_node);
1193 tree array_type;
1194 tree array_node;
1195
1196 /* If we're processing an `aaload' we might as well just pick
1197 `Object'. */
1198 if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
1199 {
1200 array_type = build_java_array_type (object_ptr_type_node, -1);
1201 lhs_type_node = object_ptr_type_node;
1202 }
1203 else
1204 array_type = build_java_array_type (lhs_type_node, -1);
1205 array_node = pop_value (array_type);
1206 array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);
1207
1208 index_node = save_expr (index_node);
1209 array_node = save_expr (array_node);
1210
1211 lhs_type_node = build_java_check_indexed_type (array_node,
1212 lhs_type_node);
1213 load_node = build_java_arrayaccess (array_node,
1214 lhs_type_node,
1215 index_node);
1216 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1217 load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
1218 push_value (load_node);
1219 }
1220
1221 /* Expands .length. Makes sure that we deal with an array and may expand
1222 a NULL check on the array object. */
1223
1224 static void
1225 expand_java_array_length (void)
1226 {
1227 tree array = pop_value (ptr_type_node);
1228 tree length = build_java_array_length_access (array);
1229
1230 push_value (length);
1231 }
1232
1233 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1234 either soft_monitorenter_node or soft_monitorexit_node. */
1235
1236 static tree
1237 build_java_monitor (tree call, tree object)
1238 {
1239 return build_call_nary (void_type_node,
1240 build_address_of (call),
1241 1, object);
1242 }
1243
1244 /* Emit code for one of the PUSHC instructions. */
1245
1246 static void
1247 expand_java_pushc (int ival, tree type)
1248 {
1249 tree value;
1250 if (type == ptr_type_node && ival == 0)
1251 value = null_pointer_node;
1252 else if (type == int_type_node || type == long_type_node)
1253 value = build_int_cst (type, ival);
1254 else if (type == float_type_node || type == double_type_node)
1255 {
1256 REAL_VALUE_TYPE x;
1257 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1258 value = build_real (type, x);
1259 }
1260 else
1261 gcc_unreachable ();
1262
1263 push_value (value);
1264 }
1265
1266 static void
1267 expand_java_return (tree type)
1268 {
1269 if (type == void_type_node)
1270 java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
1271 else
1272 {
1273 tree retval = pop_value (type);
1274 tree res = DECL_RESULT (current_function_decl);
1275 retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1276
1277 /* Handle the situation where the native integer type is smaller
1278 than the JVM integer. It can happen for many cross compilers.
1279 The whole if expression just goes away if INT_TYPE_SIZE < 32
1280 is false. */
1281 if (INT_TYPE_SIZE < 32
1282 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1283 < GET_MODE_SIZE (TYPE_MODE (type))))
1284 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1285
1286 TREE_SIDE_EFFECTS (retval) = 1;
1287 java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
1288 }
1289 }
1290
1291 static void
1292 expand_load_internal (int index, tree type, int pc)
1293 {
1294 tree copy;
1295 tree var = find_local_variable (index, type, pc);
1296
1297 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1298 on the stack. If there is an assignment to this VAR_DECL between
1299 the stack push and the use, then the wrong code could be
1300 generated. To avoid this we create a new local and copy our
1301 value into it. Then we push this new local on the stack.
1302 Hopefully this all gets optimized out. */
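  /* A sketch of the hazard: given

         iload_0
         iinc 0, 1
         ireturn

     the value pushed by iload_0 must be local 0's value from before the
     increment, so pushing VAR itself would be wrong; the fresh COPY
     preserves the old value.  */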
1303 copy = build_decl (VAR_DECL, NULL_TREE, type);
1304 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1305 && TREE_TYPE (copy) != TREE_TYPE (var))
1306 var = convert (type, var);
1307 java_add_local_var (copy);
1308 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));
1309
1310 push_value (copy);
1311 }
1312
1313 tree
1314 build_address_of (tree value)
1315 {
1316 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1317 }
1318
1319 bool
1320 class_has_finalize_method (tree type)
1321 {
1322 tree super = CLASSTYPE_SUPER (type);
1323
1324 if (super == NULL_TREE)
1325 return false; /* Every class with a real finalizer inherits */
1326 /* from java.lang.Object. */
1327 else
1328 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1329 }
1330
1331 tree
1332 java_create_object (tree type)
1333 {
1334 tree alloc_node = (class_has_finalize_method (type)
1335 ? alloc_object_node
1336 : alloc_no_finalizer_node);
1337
1338 return build_call_nary (promote_type (type),
1339 build_address_of (alloc_node),
1340 1, build_class_ref (type));
1341 }
1342
1343 static void
1344 expand_java_NEW (tree type)
1345 {
1346 tree alloc_node;
1347
1348 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1349 : alloc_no_finalizer_node);
1350 if (! CLASS_LOADED_P (type))
1351 load_class (type, 1);
1352 safe_layout_class (type);
1353 push_value (build_call_nary (promote_type (type),
1354 build_address_of (alloc_node),
1355 1, build_class_ref (type)));
1356 }
1357
1358 /* This returns an expression which will extract the class of an
1359 object. */
1360
1361 tree
1362 build_get_class (tree value)
1363 {
1364 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1365 tree vtable_field = lookup_field (&object_type_node,
1366 get_identifier ("vtable"));
1367 tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
1368 build_java_indirect_ref (object_type_node, value,
1369 flag_check_references),
1370 vtable_field, NULL_TREE);
1371 return build3 (COMPONENT_REF, class_ptr_type,
1372 build1 (INDIRECT_REF, dtable_type, tmp),
1373 class_field, NULL_TREE);
1374 }
1375
1376 /* This builds the tree representation of the `instanceof' operator.
1377 It tries various tricks to optimize this in cases where types are
1378 known. */
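/* In outline, the cases below are: when VALUE is statically known to be an
   instance of TYPE, only a null test is emitted; when the classes are known
   to lie on different branches of the inheritance tree, the result folds to
   false; when TYPE is a final class, a direct comparison of the object's
   class pointer against TYPE's class suffices; otherwise we fall back to a
   runtime call through soft_instanceof_node.  */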
1379
1380 tree
1381 build_instanceof (tree value, tree type)
1382 {
1383 tree expr;
1384 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1385 tree valtype = TREE_TYPE (TREE_TYPE (value));
1386 tree valclass = TYPE_NAME (valtype);
1387 tree klass;
1388
1389 /* When compiling from bytecode, we need to ensure that TYPE has
1390 been loaded. */
1391 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1392 {
1393 load_class (type, 1);
1394 safe_layout_class (type);
1395 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1396 return error_mark_node;
1397 }
1398 klass = TYPE_NAME (type);
1399
1400 if (type == object_type_node || inherits_from_p (valtype, type))
1401 {
1402 /* Anything except `null' is an instance of Object. Likewise,
1403 if the object is known to be an instance of the class, then
1404 we only need to check for `null'. */
1405 expr = build2 (NE_EXPR, itype, value, null_pointer_node);
1406 }
1407 else if (flag_verify_invocations
1408 && ! TYPE_ARRAY_P (type)
1409 && ! TYPE_ARRAY_P (valtype)
1410 && DECL_P (klass) && DECL_P (valclass)
1411 && ! CLASS_INTERFACE (valclass)
1412 && ! CLASS_INTERFACE (klass)
1413 && ! inherits_from_p (type, valtype)
1414 && (CLASS_FINAL (klass)
1415 || ! inherits_from_p (valtype, type)))
1416 {
1417 /* The classes are from different branches of the derivation
1418 tree, so we immediately know the answer. */
1419 expr = boolean_false_node;
1420 }
1421 else if (DECL_P (klass) && CLASS_FINAL (klass))
1422 {
1423 tree save = save_expr (value);
1424 expr = build3 (COND_EXPR, itype,
1425 build2 (NE_EXPR, boolean_type_node,
1426 save, null_pointer_node),
1427 build2 (EQ_EXPR, itype,
1428 build_get_class (save),
1429 build_class_ref (type)),
1430 boolean_false_node);
1431 }
1432 else
1433 {
1434 expr = build_call_nary (itype,
1435 build_address_of (soft_instanceof_node),
1436 2, value, build_class_ref (type));
1437 }
1438 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1439 return expr;
1440 }
1441
1442 static void
1443 expand_java_INSTANCEOF (tree type)
1444 {
1445 tree value = pop_value (object_ptr_type_node);
1446 value = build_instanceof (value, type);
1447 push_value (value);
1448 }
1449
1450 static void
1451 expand_java_CHECKCAST (tree type)
1452 {
1453 tree value = pop_value (ptr_type_node);
1454 value = build_call_nary (promote_type (type),
1455 build_address_of (soft_checkcast_node),
1456 2, build_class_ref (type), value);
1457 push_value (value);
1458 }
1459
1460 static void
1461 expand_iinc (unsigned int local_var_index, int ival, int pc)
1462 {
1463 tree local_var, res;
1464 tree constant_value;
1465
1466 flush_quick_stack ();
1467 local_var = find_local_variable (local_var_index, int_type_node, pc);
1468 constant_value = build_int_cst (NULL_TREE, ival);
1469 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1470 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1471 }
1472
1473
1474 tree
1475 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1476 {
1477 tree call = NULL;
1478 tree arg1 = convert (type, op1);
1479 tree arg2 = convert (type, op2);
1480
1481 if (type == int_type_node)
1482 {
1483 switch (op)
1484 {
1485 case TRUNC_DIV_EXPR:
1486 call = soft_idiv_node;
1487 break;
1488 case TRUNC_MOD_EXPR:
1489 call = soft_irem_node;
1490 break;
1491 default:
1492 break;
1493 }
1494 }
1495 else if (type == long_type_node)
1496 {
1497 switch (op)
1498 {
1499 case TRUNC_DIV_EXPR:
1500 call = soft_ldiv_node;
1501 break;
1502 case TRUNC_MOD_EXPR:
1503 call = soft_lrem_node;
1504 break;
1505 default:
1506 break;
1507 }
1508 }
1509
1510 gcc_assert (call);
1511 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1512 return call;
1513 }
1514
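/* Build a tree for the Java binary operation OP applied to ARG1 and ARG2
   of type TYPE.  Shift counts are masked to the operand width minus one
   (31 for int, 63 for long) as the language requires; URSHIFT_EXPR is
   lowered to an unsigned right shift; the COMPARE*_EXPR operators expand
   to nested conditionals yielding -1, 0 or 1; floating-point remainder
   calls the fmod helper; and integer division and remainder may be routed
   through runtime helpers when flag_use_divide_subroutine is set.  */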
1515 tree
1516 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1517 {
1518 tree mask;
1519 switch (op)
1520 {
1521 case URSHIFT_EXPR:
1522 {
1523 tree u_type = unsigned_type_for (type);
1524 arg1 = convert (u_type, arg1);
1525 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1526 return convert (type, arg1);
1527 }
1528 case LSHIFT_EXPR:
1529 case RSHIFT_EXPR:
1530 mask = build_int_cst (NULL_TREE,
1531 TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
1532 arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
1533 break;
1534
1535 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1536 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1537 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1538 {
1539 tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1540 boolean_type_node, arg1, arg2);
1541 tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
1542 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1543 ifexp2, integer_zero_node,
1544 op == COMPARE_L_EXPR
1545 ? integer_minus_one_node
1546 : integer_one_node);
1547 return fold_build3 (COND_EXPR, int_type_node, ifexp1,
1548 op == COMPARE_L_EXPR ? integer_one_node
1549 : integer_minus_one_node,
1550 second_compare);
1551 }
1552 case COMPARE_EXPR:
1553 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1554 {
1555 tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
1556 tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
1557 tree second_compare = fold_build3 (COND_EXPR, int_type_node,
1558 ifexp2, integer_one_node,
1559 integer_zero_node);
1560 return fold_build3 (COND_EXPR, int_type_node,
1561 ifexp1, integer_minus_one_node, second_compare);
1562 }
1563 case TRUNC_DIV_EXPR:
1564 case TRUNC_MOD_EXPR:
1565 if (TREE_CODE (type) == REAL_TYPE
1566 && op == TRUNC_MOD_EXPR)
1567 {
1568 tree call;
1569 if (type != double_type_node)
1570 {
1571 arg1 = convert (double_type_node, arg1);
1572 arg2 = convert (double_type_node, arg2);
1573 }
1574 call = build_call_nary (double_type_node,
1575 build_address_of (soft_fmod_node),
1576 2, arg1, arg2);
1577 if (type != double_type_node)
1578 call = convert (type, call);
1579 return call;
1580 }
1581
1582 if (TREE_CODE (type) == INTEGER_TYPE
1583 && flag_use_divide_subroutine
1584 && ! flag_syntax_only)
1585 return build_java_soft_divmod (op, type, arg1, arg2);
1586
1587 break;
1588 default: ;
1589 }
1590 return fold_build2 (op, type, arg1, arg2);
1591 }
1592
1593 static void
1594 expand_java_binop (tree type, enum tree_code op)
1595 {
1596 tree larg, rarg;
1597 tree ltype = type;
1598 tree rtype = type;
1599 switch (op)
1600 {
1601 case LSHIFT_EXPR:
1602 case RSHIFT_EXPR:
1603 case URSHIFT_EXPR:
1604 rtype = int_type_node;
1605 rarg = pop_value (rtype);
1606 break;
1607 default:
1608 rarg = pop_value (rtype);
1609 }
1610 larg = pop_value (ltype);
1611 push_value (build_java_binop (op, type, larg, rarg));
1612 }
1613
1614 /* Lookup the field named NAME in *TYPEP or its super classes.
1615 If not found, return NULL_TREE.
1616 (If the *TYPEP is not found, or if the field reference is
1617 ambiguous, return error_mark_node.)
1618 If found, return the FIELD_DECL, and set *TYPEP to the
1619 class containing the field. */
1620
1621 tree
1622 lookup_field (tree *typep, tree name)
1623 {
1624 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1625 {
1626 load_class (*typep, 1);
1627 safe_layout_class (*typep);
1628 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1629 return error_mark_node;
1630 }
1631 do
1632 {
1633 tree field, binfo, base_binfo;
1634 tree save_field;
1635 int i;
1636
1637 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1638 if (DECL_NAME (field) == name)
1639 return field;
1640
1641 /* Process implemented interfaces. */
1642 save_field = NULL_TREE;
1643 for (binfo = TYPE_BINFO (*typep), i = 0;
1644 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1645 {
1646 tree t = BINFO_TYPE (base_binfo);
1647 if ((field = lookup_field (&t, name)))
1648 {
1649 if (save_field == field)
1650 continue;
1651 if (save_field == NULL_TREE)
1652 save_field = field;
1653 else
1654 {
1655 tree i1 = DECL_CONTEXT (save_field);
1656 tree i2 = DECL_CONTEXT (field);
1657 error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
1658 IDENTIFIER_POINTER (name),
1659 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1660 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1661 return error_mark_node;
1662 }
1663 }
1664 }
1665
1666 if (save_field != NULL_TREE)
1667 return save_field;
1668
1669 *typep = CLASSTYPE_SUPER (*typep);
1670 } while (*typep);
1671 return NULL_TREE;
1672 }
1673
1674 /* Look up the field named NAME in object SELF_VALUE,
1675 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1676 SELF_VALUE is NULL_TREE if looking for a static field. */
1677
1678 tree
1679 build_field_ref (tree self_value, tree self_class, tree name)
1680 {
1681 tree base_class = self_class;
1682 tree field_decl = lookup_field (&base_class, name);
1683 if (field_decl == NULL_TREE)
1684 {
1685 error ("field %qs not found", IDENTIFIER_POINTER (name));
1686 return error_mark_node;
1687 }
1688 if (self_value == NULL_TREE)
1689 {
1690 return build_static_field_ref (field_decl);
1691 }
1692 else
1693 {
1694 tree base_type = promote_type (base_class);
1695
1696 /* CHECK is true if self_value is not the this pointer. */
1697 int check = (! (DECL_P (self_value)
1698 && DECL_NAME (self_value) == this_identifier_node));
1699
1700 /* Determine whether a field offset from NULL will lie within
1701 Page 0: this is necessary on those GNU/Linux/BSD systems that
1702 trap SEGV to generate NullPointerExceptions.
1703
1704 We assume that Page 0 will be mapped with NOPERM, and that
1705 memory may be allocated from any other page, so only field
1706 offsets < pagesize are guaranteed to trap. We also assume
1707 the smallest page size we'll encounter is 4k bytes. */
1708 if (! flag_syntax_only && check && ! flag_check_references
1709 && ! flag_indirect_dispatch)
1710 {
1711 tree field_offset = byte_position (field_decl);
1712 if (! page_size)
1713 page_size = size_int (4096);
1714 check = ! INT_CST_LT_UNSIGNED (field_offset, page_size);
1715 }
1716
1717 if (base_type != TREE_TYPE (self_value))
1718 self_value = fold_build1 (NOP_EXPR, base_type, self_value);
1719 if (! flag_syntax_only && flag_indirect_dispatch)
1720 {
1721 tree otable_index
1722 = build_int_cst (NULL_TREE, get_symbol_table_index
1723 (field_decl, NULL_TREE,
1724 &TYPE_OTABLE_METHODS (output_class)));
1725 tree field_offset
1726 = build4 (ARRAY_REF, integer_type_node,
1727 TYPE_OTABLE_DECL (output_class), otable_index,
1728 NULL_TREE, NULL_TREE);
1729 tree address;
1730
1731 if (DECL_CONTEXT (field_decl) != output_class)
1732 field_offset
1733 = build3 (COND_EXPR, TREE_TYPE (field_offset),
1734 build2 (EQ_EXPR, boolean_type_node,
1735 field_offset, integer_zero_node),
1736 build_call_nary (void_type_node,
1737 build_address_of (soft_nosuchfield_node),
1738 1, otable_index),
1739 field_offset);
1740
1741 field_offset = fold (convert (sizetype, field_offset));
1742 self_value = java_check_reference (self_value, check);
1743 address
1744 = fold_build2 (POINTER_PLUS_EXPR,
1745 TREE_TYPE (self_value),
1746 self_value, field_offset);
1747 address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
1748 address);
1749 return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
1750 }
1751
1752 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1753 self_value, check);
1754 return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
1755 self_value, field_decl, NULL_TREE);
1756 }
1757 }
1758
1759 tree
1760 lookup_label (int pc)
1761 {
1762 tree name;
1763 char buf[32];
1764 if (pc > highest_label_pc_this_method)
1765 highest_label_pc_this_method = pc;
1766 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", start_label_pc_this_method + pc);
1767 name = get_identifier (buf);
1768 if (IDENTIFIER_LOCAL_VALUE (name))
1769 return IDENTIFIER_LOCAL_VALUE (name);
1770 else
1771 {
1772 /* The type of the address of a label is return_address_type_node. */
1773 tree decl = create_label_decl (name);
1774 return pushdecl (decl);
1775 }
1776 }
1777
1778 /* Generate a unique name for loop and switch labels,
1779 try-catch-finally block labels, and temporary variables. */
1780
1781 tree
1782 generate_name (void)
1783 {
1784 static int l_number = 0;
1785 char buff [32];
1786 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1787 l_number++;
1788 return get_identifier (buff);
1789 }
1790
1791 tree
1792 create_label_decl (tree name)
1793 {
1794 tree decl;
1795 decl = build_decl (LABEL_DECL, name,
1796 TREE_TYPE (return_address_type_node));
1797 DECL_CONTEXT (decl) = current_function_decl;
1798 DECL_IGNORED_P (decl) = 1;
1799 return decl;
1800 }
1801
1802 /* This maps a bytecode offset (PC) to various flags. */
1803 char *instruction_bits;
1804
1805 /* This is a vector of type states for the current method. It is
1806 indexed by PC. Each element is a tree vector holding the type
1807 state at that PC. We only note type states at basic block
1808 boundaries. */
1809 VEC(tree, gc) *type_states;
1810
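/* Note that TARGET_PC is the target of a branch: create its label and
   mark it as a jump target in instruction_bits[].  */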
1811 static void
1812 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1813 {
1814 lookup_label (target_pc);
1815 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1816 }
1817
1818 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1819 where CONDITION is one of the comparison operators.  */
1820
1821 static void
1822 expand_compare (enum tree_code condition, tree value1, tree value2,
1823 int target_pc)
1824 {
1825 tree target = lookup_label (target_pc);
1826 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1827 java_add_stmt
1828 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1829 build1 (GOTO_EXPR, void_type_node, target),
1830 build_java_empty_stmt ()));
1831 }
1832
1833 /* Emit code for a TEST-type opcode. */
1834
1835 static void
1836 expand_test (enum tree_code condition, tree type, int target_pc)
1837 {
1838 tree value1, value2;
1839 flush_quick_stack ();
1840 value1 = pop_value (type);
1841 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1842 expand_compare (condition, value1, value2, target_pc);
1843 }
1844
1845 /* Emit code for a COND-type opcode. */
1846
1847 static void
1848 expand_cond (enum tree_code condition, tree type, int target_pc)
1849 {
1850 tree value1, value2;
1851 flush_quick_stack ();
1852 /* note: pop values in opposite order */
1853 value2 = pop_value (type);
1854 value1 = pop_value (type);
1855 /* Maybe should check value1 and value2 for type compatibility ??? */
1856 expand_compare (condition, value1, value2, target_pc);
1857 }
1858
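/* Emit an unconditional jump to the label associated with TARGET_PC.  */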
1859 static void
1860 expand_java_goto (int target_pc)
1861 {
1862 tree target_label = lookup_label (target_pc);
1863 flush_quick_stack ();
1864 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1865 }
1866
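/* Build a SWITCH_EXPR on SELECTOR whose default case branches to the
   label for DEFAULT_PC, add it to the current statement list, and
   return it so that the individual cases can be appended.  */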
1867 static tree
1868 expand_java_switch (tree selector, int default_pc)
1869 {
1870 tree switch_expr, x;
1871
1872 flush_quick_stack ();
1873 switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
1874 NULL_TREE, NULL_TREE);
1875 java_add_stmt (switch_expr);
1876
1877 x = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, NULL_TREE,
1878 create_artificial_label ());
1879 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1880
1881 x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
1882 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1883
1884 return switch_expr;
1885 }
1886
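/* Append to SWITCH_EXPR a case that branches to the label for
   TARGET_PC when the selector equals MATCH.  */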
1887 static void
1888 expand_java_add_case (tree switch_expr, int match, int target_pc)
1889 {
1890 tree value, x;
1891
1892 value = build_int_cst (TREE_TYPE (switch_expr), match);
1893
1894 x = build3 (CASE_LABEL_EXPR, void_type_node, value, NULL_TREE,
1895 create_artificial_label ());
1896 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1897
1898 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1899 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1900 }
1901
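/* Pop the actual arguments for a call whose formal parameter types are
   ARG_TYPES, returning them as a TREE_LIST in declaration order.
   Pointer arguments are cast to their declared type and small integer
   arguments are promoted when the target requires it.  */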
1902 static tree
1903 pop_arguments (tree arg_types)
1904 {
1905 if (arg_types == end_params_node)
1906 return NULL_TREE;
1907 if (TREE_CODE (arg_types) == TREE_LIST)
1908 {
1909 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1910 tree type = TREE_VALUE (arg_types);
1911 tree arg = pop_value (type);
1912
1913 /* We simply cast each argument to its proper type. This is
1914 needed since we lose type information coming out of the
1915 verifier. We also have to do this when we pop an integer
1916 type that must be promoted for the function call. */
1917 if (TREE_CODE (type) == POINTER_TYPE)
1918 arg = build1 (NOP_EXPR, type, arg);
1919 else if (targetm.calls.promote_prototypes (type)
1920 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1921 && INTEGRAL_TYPE_P (type))
1922 arg = convert (integer_type_node, arg);
1923 return tree_cons (NULL_TREE, arg, tail);
1924 }
1925 gcc_unreachable ();
1926 }
1927
1928 /* Attach to PTR (a block) the declaration found in ENTRY. */
1929
1930 int
1931 attach_init_test_initialization_flags (void **entry, void *ptr)
1932 {
1933 tree block = (tree)ptr;
1934 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
1935
1936 if (block != error_mark_node)
1937 {
1938 if (TREE_CODE (block) == BIND_EXPR)
1939 {
1940 tree body = BIND_EXPR_BODY (block);
1941 TREE_CHAIN (ite->value) = BIND_EXPR_VARS (block);
1942 BIND_EXPR_VARS (block) = ite->value;
1943 body = build2 (COMPOUND_EXPR, void_type_node,
1944 build1 (DECL_EXPR, void_type_node, ite->value), body);
1945 BIND_EXPR_BODY (block) = body;
1946 }
1947 else
1948 {
1949 tree body = BLOCK_SUBBLOCKS (block);
1950 TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
1951 BLOCK_EXPR_DECLS (block) = ite->value;
1952 body = build2 (COMPOUND_EXPR, void_type_node,
1953 build1 (DECL_EXPR, void_type_node, ite->value), body);
1954 BLOCK_SUBBLOCKS (block) = body;
1955 }
1956
1957 }
1958 return true;
1959 }
1960
1961 /* Build an expression to initialize the class CLAS.
1962 If EXPR is non-NULL, return an expression that first calls the
1963 initializer (if it is needed) and then evaluates EXPR.  */
1964
1965 tree
1966 build_class_init (tree clas, tree expr)
1967 {
1968 tree init;
1969
1970 /* An optimization: if CLAS is a superclass of the class we're
1971 compiling, we don't need to initialize it. However, if CLAS is
1972 an interface, it won't necessarily be initialized, even if we
1973 implement it. */
1974 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1975 && inherits_from_p (current_class, clas))
1976 || current_class == clas)
1977 return expr;
1978
1979 if (always_initialize_class_p)
1980 {
1981 init = build_call_nary (void_type_node,
1982 build_address_of (soft_initclass_node),
1983 1, build_class_ref (clas));
1984 TREE_SIDE_EFFECTS (init) = 1;
1985 }
1986 else
1987 {
1988 tree *init_test_decl;
1989 tree decl;
1990 init_test_decl = java_treetreehash_new
1991 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1992
1993 if (*init_test_decl == NULL)
1994 {
1995 /* Build a declaration and mark it as a flag used to track
1996 static class initializations. */
1997 decl = build_decl (VAR_DECL, NULL_TREE,
1998 boolean_type_node);
1999 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
2000 DECL_CONTEXT (decl) = current_function_decl;
2001 DECL_INITIAL (decl) = boolean_false_node;
2002 /* Don't emit any symbolic debugging info for this decl. */
2003 DECL_IGNORED_P (decl) = 1;
2004 *init_test_decl = decl;
2005 }
2006
2007 init = build_call_nary (void_type_node,
2008 build_address_of (soft_initclass_node),
2009 1, build_class_ref (clas));
2010 TREE_SIDE_EFFECTS (init) = 1;
2011 init = build3 (COND_EXPR, void_type_node,
2012 build2 (EQ_EXPR, boolean_type_node,
2013 *init_test_decl, boolean_false_node),
2014 init, integer_zero_node);
2015 TREE_SIDE_EFFECTS (init) = 1;
2016 init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
2017 build2 (MODIFY_EXPR, boolean_type_node,
2018 *init_test_decl, boolean_true_node));
2019 TREE_SIDE_EFFECTS (init) = 1;
2020 }
2021
2022 if (expr != NULL_TREE)
2023 {
2024 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
2025 TREE_SIDE_EFFECTS (expr) = 1;
2026 return expr;
2027 }
2028 return init;
2029 }
2030
2031 \f
2032
2033 /* Rewrite expensive calls that require stack unwinding at runtime to
2034 cheaper alternatives. The logic here performs these
2035 transformations:
2036
2037 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2038 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2039
2040 */
2041
2042 typedef struct
2043 {
2044 const char *classname;
2045 const char *method;
2046 const char *signature;
2047 const char *new_signature;
2048 int flags;
2049 tree (*rewrite_arglist) (tree arglist);
2050 } rewrite_rule;
2051
2052 /* Add __builtin_return_address(0) to the end of an arglist. */
2053
2054
2055 static tree
2056 rewrite_arglist_getcaller (tree arglist)
2057 {
2058 tree retaddr
2059 = build_call_expr (built_in_decls[BUILT_IN_RETURN_ADDRESS],
2060 1, integer_zero_node);
2061
2062 DECL_INLINE (current_function_decl) = 0;
2063
2064 return chainon (arglist,
2065 tree_cons (NULL_TREE, retaddr,
2066 NULL_TREE));
2067 }
2068
2069 /* Add this.class to the end of an arglist. */
2070
2071 static tree
2072 rewrite_arglist_getclass (tree arglist)
2073 {
2074 return chainon (arglist,
2075 tree_cons (NULL_TREE, build_class_ref (output_class),
2076 NULL_TREE));
2077 }
2078
2079 static rewrite_rule rules[] =
2080 {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
2081 "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
2082 ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},
2083 {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
2084 "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
2085 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},
2086 {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
2087 "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
2088 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2089 {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
2090 "()Ljava/lang/ClassLoader;",
2091 "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
2092 ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},
2093
2094 {NULL, NULL, NULL, NULL, 0, NULL}};
2095
2096 /* True if this method is special, i.e. it's a private method that
2097 should be exported from a DSO. */
2098
2099 bool
2100 special_method_p (tree candidate_method)
2101 {
2102 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2103 tree method = DECL_NAME (candidate_method);
2104 rewrite_rule *p;
2105
2106 for (p = rules; p->classname; p++)
2107 {
2108 if (get_identifier (p->classname) == context
2109 && get_identifier (p->method) == method)
2110 return true;
2111 }
2112 return false;
2113 }
2114
2115 /* Scan the rules list for replacements for *METHOD_P and replace the
2116 args accordingly. If the rewrite results in an access to a private
2117 method, update SPECIAL.  */
2118
2119 void
2120 maybe_rewrite_invocation (tree *method_p, tree *arg_list_p,
2121 tree *method_signature_p, tree *special)
2122 {
2123 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2124 rewrite_rule *p;
2125 *special = NULL_TREE;
2126
2127 for (p = rules; p->classname; p++)
2128 {
2129 if (get_identifier (p->classname) == context)
2130 {
2131 tree method = DECL_NAME (*method_p);
2132 if (get_identifier (p->method) == method
2133 && get_identifier (p->signature) == *method_signature_p)
2134 {
2135 tree maybe_method
2136 = lookup_java_method (DECL_CONTEXT (*method_p),
2137 method,
2138 get_identifier (p->new_signature));
2139 if (! maybe_method && ! flag_verify_invocations)
2140 {
2141 maybe_method
2142 = add_method (DECL_CONTEXT (*method_p), p->flags,
2143 method, get_identifier (p->new_signature));
2144 DECL_EXTERNAL (maybe_method) = 1;
2145 }
2146 *method_p = maybe_method;
2147 gcc_assert (*method_p);
2148 *arg_list_p = p->rewrite_arglist (*arg_list_p);
2149 *method_signature_p = get_identifier (p->new_signature);
2150 *special = integer_one_node;
2151
2152 break;
2153 }
2154 }
2155 }
2156 }
2157
2158 \f
2159
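/* Return an expression computing the code address for a direct call
   to METHOD.  For compiled classes this is either the method's address
   or, with indirect dispatch, an atable slot; otherwise the address is
   fetched from the method's `ncode' field at run time.  */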
2160 tree
2161 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
2162 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
2163 tree arg_list ATTRIBUTE_UNUSED, tree special)
2164 {
2165 tree func;
2166 if (is_compiled_class (self_type))
2167 {
2168 /* With indirect dispatch we have to use indirect calls for all
2169 publicly visible methods or gcc will use PLT indirections
2170 to reach them. We also have to use indirect dispatch for all
2171 external methods. */
2172 if (! flag_indirect_dispatch
2173 || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
2174 {
2175 func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
2176 method);
2177 }
2178 else
2179 {
2180 tree table_index
2181 = build_int_cst (NULL_TREE,
2182 (get_symbol_table_index
2183 (method, special,
2184 &TYPE_ATABLE_METHODS (output_class))));
2185 func
2186 = build4 (ARRAY_REF,
2187 TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
2188 TYPE_ATABLE_DECL (output_class), table_index,
2189 NULL_TREE, NULL_TREE);
2190 }
2191 func = convert (method_ptr_type_node, func);
2192 }
2193 else
2194 {
2195 /* We don't know whether the method has been (statically) compiled.
2196 Compile this code to get a reference to the method's code:
2197
2198 SELF_TYPE->methods[METHOD_INDEX].ncode
2199
2200 */
2201
2202 int method_index = 0;
2203 tree meth, ref;
2204
2205 /* The method might actually be declared in some superclass, so
2206 we have to use its class context, not the caller's notion of
2207 where the method is. */
2208 self_type = DECL_CONTEXT (method);
2209 ref = build_class_ref (self_type);
2210 ref = build1 (INDIRECT_REF, class_type_node, ref);
2211 if (ncode_ident == NULL_TREE)
2212 ncode_ident = get_identifier ("ncode");
2213 if (methods_ident == NULL_TREE)
2214 methods_ident = get_identifier ("methods");
2215 ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
2216 lookup_field (&class_type_node, methods_ident),
2217 NULL_TREE);
2218 for (meth = TYPE_METHODS (self_type);
2219 ; meth = TREE_CHAIN (meth))
2220 {
2221 if (method == meth)
2222 break;
2223 if (meth == NULL_TREE)
2224 fatal_error ("method '%s' not found in class",
2225 IDENTIFIER_POINTER (DECL_NAME (method)));
2226 method_index++;
2227 }
2228 method_index *= int_size_in_bytes (method_type_node);
2229 ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
2230 ref, size_int (method_index));
2231 ref = build1 (INDIRECT_REF, method_type_node, ref);
2232 func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
2233 ref, lookup_field (&method_type_node, ncode_ident),
2234 NULL_TREE);
2235 }
2236 return func;
2237 }
2238
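/* Build an expression that fetches the dispatch table (vtable) used
   for a virtual or interface call.  The receiver is the first element
   of ARG_LIST and is wrapped in a SAVE_EXPR so that it is evaluated
   only once.  */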
2239 tree
2240 invoke_build_dtable (int is_invoke_interface, tree arg_list)
2241 {
2242 tree dtable, objectref;
2243
2244 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
2245
2246 /* If we're dealing with interfaces and if the objectref
2247 argument is an array then get the dispatch table of the class
2248 Object rather than the one from the objectref. */
2249 objectref = (is_invoke_interface
2250 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list)))
2251 ? build_class_ref (object_type_node) : TREE_VALUE (arg_list));
2252
2253 if (dtable_ident == NULL_TREE)
2254 dtable_ident = get_identifier ("vtable");
2255 dtable = build_java_indirect_ref (object_type_node, objectref,
2256 flag_check_references);
2257 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2258 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2259
2260 return dtable;
2261 }
2262
2263 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2264 T. If this decl has not been seen before, it will be added to the
2265 [oa]table_methods. If it has, the existing table slot will be
2266 reused. */
2267
2268 int
2269 get_symbol_table_index (tree t, tree special, tree *symbol_table)
2270 {
2271 int i = 1;
2272 tree method_list;
2273
2274 if (*symbol_table == NULL_TREE)
2275 {
2276 *symbol_table = build_tree_list (special, t);
2277 return 1;
2278 }
2279
2280 method_list = *symbol_table;
2281
2282 while (1)
2283 {
2284 tree value = TREE_VALUE (method_list);
2285 tree purpose = TREE_PURPOSE (method_list);
2286 if (value == t && purpose == special)
2287 return i;
2288 i++;
2289 if (TREE_CHAIN (method_list) == NULL_TREE)
2290 break;
2291 else
2292 method_list = TREE_CHAIN (method_list);
2293 }
2294
2295 TREE_CHAIN (method_list) = build_tree_list (special, t);
2296 return i;
2297 }
2298
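/* Build an expression computing the code address for a virtual call
   to METHOD through the dispatch table DTABLE, using either an otable
   slot (indirect dispatch) or the method's DECL_VINDEX.  */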
2299 tree
2300 build_invokevirtual (tree dtable, tree method, tree special)
2301 {
2302 tree func;
2303 tree nativecode_ptr_ptr_type_node
2304 = build_pointer_type (nativecode_ptr_type_node);
2305 tree method_index;
2306 tree otable_index;
2307
2308 if (flag_indirect_dispatch)
2309 {
2310 gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));
2311
2312 otable_index
2313 = build_int_cst (NULL_TREE, get_symbol_table_index
2314 (method, special,
2315 &TYPE_OTABLE_METHODS (output_class)));
2316 method_index = build4 (ARRAY_REF, integer_type_node,
2317 TYPE_OTABLE_DECL (output_class),
2318 otable_index, NULL_TREE, NULL_TREE);
2319 }
2320 else
2321 {
2322 /* We fetch the DECL_VINDEX field directly here, rather than
2323 using get_method_index(). DECL_VINDEX is the true offset
2324 from the vtable base to a method, regardless of any extra
2325 words inserted at the start of the vtable. */
2326 method_index = DECL_VINDEX (method);
2327 method_index = size_binop (MULT_EXPR, method_index,
2328 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
2329 if (TARGET_VTABLE_USES_DESCRIPTORS)
2330 method_index = size_binop (MULT_EXPR, method_index,
2331 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
2332 }
2333
2334 func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
2335 convert (sizetype, method_index));
2336
2337 if (TARGET_VTABLE_USES_DESCRIPTORS)
2338 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
2339 else
2340 {
2341 func = fold_convert (nativecode_ptr_ptr_type_node, func);
2342 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
2343 }
2344
2345 return func;
2346 }
2347
2348 static GTY(()) tree class_ident;
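/* Build an expression that resolves an interface call to METHOD at
   run time: the receiver's class is fetched from DTABLE and passed,
   together with the interface and a method index, to the runtime
   lookup routine referenced by soft_lookupinterfacemethod_node.  */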
2349 tree
2350 build_invokeinterface (tree dtable, tree method)
2351 {
2352 tree interface;
2353 tree idx;
2354
2355 /* We expand invokeinterface here. */
2356
2357 if (class_ident == NULL_TREE)
2358 class_ident = get_identifier ("class");
2359
2360 dtable = build_java_indirect_ref (dtable_type, dtable,
2361 flag_check_references);
2362 dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
2363 lookup_field (&dtable_type, class_ident), NULL_TREE);
2364
2365 interface = DECL_CONTEXT (method);
2366 gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
2367 layout_class_methods (interface);
2368
2369 if (flag_indirect_dispatch)
2370 {
2371 int itable_index
2372 = 2 * (get_symbol_table_index
2373 (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
2374 interface
2375 = build4 (ARRAY_REF,
2376 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2377 TYPE_ITABLE_DECL (output_class),
2378 build_int_cst (NULL_TREE, itable_index-1),
2379 NULL_TREE, NULL_TREE);
2380 idx
2381 = build4 (ARRAY_REF,
2382 TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
2383 TYPE_ITABLE_DECL (output_class),
2384 build_int_cst (NULL_TREE, itable_index),
2385 NULL_TREE, NULL_TREE);
2386 interface = convert (class_ptr_type, interface);
2387 idx = convert (integer_type_node, idx);
2388 }
2389 else
2390 {
2391 idx = build_int_cst (NULL_TREE,
2392 get_interface_method_index (method, interface));
2393 interface = build_class_ref (interface);
2394 }
2395
2396 return build_call_nary (ptr_type_node,
2397 build_address_of (soft_lookupinterfacemethod_node),
2398 3, dtable, interface, idx);
2399 }
2400
2401 /* Expand one of the invoke_* opcodes.
2402 OPCODE is the specific opcode.
2403 METHOD_REF_INDEX is an index into the constant pool.
2404 NARGS is the number of arguments, or -1 if not specified. */
2405
2406 static void
2407 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2408 {
2409 tree method_signature
2410 = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2411 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool,
2412 method_ref_index);
2413 tree self_type
2414 = get_class_constant (current_jcf,
2415 COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool,
2416 method_ref_index));
2417 const char *const self_name
2418 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2419 tree call, func, method, arg_list, method_type;
2420 tree check = NULL_TREE;
2421
2422 tree special = NULL_TREE;
2423
2424 if (! CLASS_LOADED_P (self_type))
2425 {
2426 load_class (self_type, 1);
2427 safe_layout_class (self_type);
2428 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2429 fatal_error ("failed to find class '%s'", self_name);
2430 }
2431 layout_class_methods (self_type);
2432
2433 if (ID_INIT_P (method_name))
2434 method = lookup_java_constructor (self_type, method_signature);
2435 else
2436 method = lookup_java_method (self_type, method_name, method_signature);
2437
2438 /* We've found a method in a class other than the one in which it
2439 was wanted. This can happen if, for instance, we're trying to
2440 compile invokespecial super.equals().
2441 FIXME: This is a kludge. Rather than nullifying the result, we
2442 should change lookup_java_method() so that it doesn't search the
2443 superclass chain when we're BC-compiling. */
2444 if (! flag_verify_invocations
2445 && method
2446 && ! TYPE_ARRAY_P (self_type)
2447 && self_type != DECL_CONTEXT (method))
2448 method = NULL_TREE;
2449
2450 /* We've found a method in an interface, but this isn't an interface
2451 call. */
2452 if (opcode != OPCODE_invokeinterface
2453 && method
2454 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2455 method = NULL_TREE;
2456
2457 /* We've found a non-interface method but we are making an
2458 interface call. This can happen if the interface overrides a
2459 method in Object. */
2460 if (! flag_verify_invocations
2461 && opcode == OPCODE_invokeinterface
2462 && method
2463 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2464 method = NULL_TREE;
2465
2466 if (method == NULL_TREE)
2467 {
2468 if (flag_verify_invocations || ! flag_indirect_dispatch)
2469 {
2470 error ("class '%s' has no method named '%s' matching signature '%s'",
2471 self_name,
2472 IDENTIFIER_POINTER (method_name),
2473 IDENTIFIER_POINTER (method_signature));
2474 }
2475 else
2476 {
2477 int flags = ACC_PUBLIC;
2478 if (opcode == OPCODE_invokestatic)
2479 flags |= ACC_STATIC;
2480 if (opcode == OPCODE_invokeinterface)
2481 {
2482 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2483 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2484 }
2485 method = add_method (self_type, flags, method_name,
2486 method_signature);
2487 DECL_ARTIFICIAL (method) = 1;
2488 METHOD_DUMMY (method) = 1;
2489 layout_class_method (self_type, NULL,
2490 method, NULL);
2491 }
2492 }
2493
2494 /* invokestatic must name a static, non-abstract method; the other invoke opcodes must not name a static one.  */
2495 if (method != NULL_TREE)
2496 {
2497 if (opcode == OPCODE_invokestatic)
2498 {
2499 if (!METHOD_STATIC (method))
2500 {
2501 error ("invokestatic on non static method");
2502 method = NULL_TREE;
2503 }
2504 else if (METHOD_ABSTRACT (method))
2505 {
2506 error ("invokestatic on abstract method");
2507 method = NULL_TREE;
2508 }
2509 }
2510 else
2511 {
2512 if (METHOD_STATIC (method))
2513 {
2514 error ("invoke[non-static] on static method");
2515 method = NULL_TREE;
2516 }
2517 }
2518 }
2519
2520 if (method == NULL_TREE)
2521 {
2522 /* If we got here, we emitted an error message above. So we
2523 just pop the arguments, push a properly-typed zero, and
2524 continue. */
2525 method_type = get_type_from_signature (method_signature);
2526 pop_arguments (TYPE_ARG_TYPES (method_type));
2527 if (opcode != OPCODE_invokestatic)
2528 pop_type (self_type);
2529 method_type = promote_type (TREE_TYPE (method_type));
2530 push_value (convert (method_type, integer_zero_node));
2531 return;
2532 }
2533
2534 method_type = TREE_TYPE (method);
2535 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2536 flush_quick_stack ();
2537
2538 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2539 &special);
2540
2541 func = NULL_TREE;
2542 if (opcode == OPCODE_invokestatic)
2543 func = build_known_method_ref (method, method_type, self_type,
2544 method_signature, arg_list, special);
2545 else if (opcode == OPCODE_invokespecial
2546 || (opcode == OPCODE_invokevirtual
2547 && (METHOD_PRIVATE (method)
2548 || METHOD_FINAL (method)
2549 || CLASS_FINAL (TYPE_NAME (self_type)))))
2550 {
2551 /* If the object for the method call is null, we throw an
2552 exception. We don't do this if the object is the current
2553 method's `this'. In other cases we just rely on an
2554 optimization pass to eliminate redundant checks. FIXME:
2555 Unfortunately there doesn't seem to be a way to determine
2556 what the current method is right now.
2557 We do omit the check if we're calling <init>. */
2558 /* We use a SAVE_EXPR here to make sure we only evaluate
2559 the new `self' expression once. */
2560 tree save_arg = save_expr (TREE_VALUE (arg_list));
2561 TREE_VALUE (arg_list) = save_arg;
2562 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2563 func = build_known_method_ref (method, method_type, self_type,
2564 method_signature, arg_list, special);
2565 }
2566 else
2567 {
2568 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2569 arg_list);
2570 if (opcode == OPCODE_invokevirtual)
2571 func = build_invokevirtual (dtable, method, special);
2572 else
2573 func = build_invokeinterface (dtable, method);
2574 }
2575
2576 if (TREE_CODE (func) == ADDR_EXPR)
2577 TREE_TYPE (func) = build_pointer_type (method_type);
2578 else
2579 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2580
2581 call = build_call_list (TREE_TYPE (method_type), func, arg_list);
2582 TREE_SIDE_EFFECTS (call) = 1;
2583 call = check_for_builtin (method, call);
2584
2585 if (check != NULL_TREE)
2586 {
2587 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2588 TREE_SIDE_EFFECTS (call) = 1;
2589 }
2590
2591 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2592 java_add_stmt (call);
2593 else
2594 {
2595 push_value (call);
2596 flush_quick_stack ();
2597 }
2598 }
2599
2600 /* Create a stub which will be put into the vtable but which will call
2601 a JNI function. */
2602
2603 tree
2604 build_jni_stub (tree method)
2605 {
2606 tree jnifunc, call, args, body, method_sig, arg_types;
2607 tree jniarg0, jniarg1, jniarg2, jniarg3;
2608 tree jni_func_type, tem;
2609 tree env_var, res_var = NULL_TREE, block;
2610 tree method_args, res_type;
2611 tree meth_var;
2612 tree bind;
2613
2614 int args_size = 0;
2615
2616 tree klass = DECL_CONTEXT (method);
2617 klass = build_class_ref (klass);
2618
2619 gcc_assert (METHOD_NATIVE (method) && flag_jni);
2620
2621 DECL_ARTIFICIAL (method) = 1;
2622 DECL_EXTERNAL (method) = 0;
2623
2624 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2625 DECL_CONTEXT (env_var) = method;
2626
2627 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2628 {
2629 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2630 TREE_TYPE (TREE_TYPE (method)));
2631 DECL_CONTEXT (res_var) = method;
2632 TREE_CHAIN (env_var) = res_var;
2633 }
2634
2635 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2636 TREE_STATIC (meth_var) = 1;
2637 TREE_PUBLIC (meth_var) = 0;
2638 DECL_EXTERNAL (meth_var) = 0;
2639 DECL_CONTEXT (meth_var) = method;
2640 DECL_ARTIFICIAL (meth_var) = 1;
2641 DECL_INITIAL (meth_var) = null_pointer_node;
2642 TREE_USED (meth_var) = 1;
2643 chainon (env_var, meth_var);
2644 build_result_decl (method);
2645
2646 method_args = DECL_ARGUMENTS (method);
2647 block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
2648 TREE_SIDE_EFFECTS (block) = 1;
2649 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2650
2651 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2652 body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
2653 build_call_nary (ptr_type_node,
2654 build_address_of (soft_getjnienvnewframe_node),
2655 1, klass));
2656
2657 /* All the arguments to this method become arguments to the
2658 underlying JNI function. If we had to wrap object arguments in a
2659 special way, we would do that here. */
2660 args = NULL_TREE;
2661 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2662 {
2663 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
2664 #ifdef PARM_BOUNDARY
2665 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2666 * PARM_BOUNDARY);
2667 #endif
2668 args_size += (arg_bits / BITS_PER_UNIT);
2669
2670 args = tree_cons (NULL_TREE, tem, args);
2671 }
2672 args = nreverse (args);
2673 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2674
2675 /* For a static method the second argument is the class. For a
2676 non-static method the second argument is `this'; that is already
2677 available in the argument list. */
2678 if (METHOD_STATIC (method))
2679 {
2680 args_size += int_size_in_bytes (TREE_TYPE (klass));
2681 args = tree_cons (NULL_TREE, klass, args);
2682 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2683 }
2684
2685 /* The JNIEnv structure is the first argument to the JNI function. */
2686 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2687 args = tree_cons (NULL_TREE, env_var, args);
2688 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2689
2690 /* We call _Jv_LookupJNIMethod to find the actual underlying
2691 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2692 exception if this function is not found at runtime. */
2693 method_sig = build_java_signature (TREE_TYPE (method));
2694 jniarg0 = klass;
2695 jniarg1 = build_utf8_ref (DECL_NAME (method));
2696 jniarg2 = build_utf8_ref (unmangle_classname
2697 (IDENTIFIER_POINTER (method_sig),
2698 IDENTIFIER_LENGTH (method_sig)));
2699 jniarg3 = build_int_cst (NULL_TREE, args_size);
2700
2701 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2702
2703 #ifdef MODIFY_JNI_METHOD_CALL
2704 tem = MODIFY_JNI_METHOD_CALL (tem);
2705 #endif
2706
2707 jni_func_type = build_pointer_type (tem);
2708
2709 jnifunc = build3 (COND_EXPR, ptr_type_node,
2710 build2 (NE_EXPR, boolean_type_node,
2711 meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
2712 meth_var,
2713 build2 (MODIFY_EXPR, ptr_type_node, meth_var,
2714 build_call_nary (ptr_type_node,
2715 build_address_of
2716 (soft_lookupjnimethod_node),
2717 4,
2718 jniarg0, jniarg1,
2719 jniarg2, jniarg3)));
2720
2721 /* Now we make the actual JNI call via the resulting function
2722 pointer. */
2723 call = build_call_list (TREE_TYPE (TREE_TYPE (method)),
2724 build1 (NOP_EXPR, jni_func_type, jnifunc),
2725 args);
2726
2727 /* If the JNI call returned a result, capture it here. If we had to
2728 unwrap JNI object results, we would do that here. */
2729 if (res_var != NULL_TREE)
2730 {
2731 /* If the call returns an object, it may return a JNI weak
2732 reference, in which case we must unwrap it. */
2733 if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
2734 call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
2735 build_address_of (soft_unwrapjni_node),
2736 1, call);
2737 call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2738 res_var, call);
2739 }
2740
2741 TREE_SIDE_EFFECTS (call) = 1;
2742
2743 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2744 TREE_SIDE_EFFECTS (body) = 1;
2745
2746 /* Now free the environment we allocated. */
2747 call = build_call_nary (ptr_type_node,
2748 build_address_of (soft_jnipopsystemframe_node),
2749 1, env_var);
2750 TREE_SIDE_EFFECTS (call) = 1;
2751 body = build2 (COMPOUND_EXPR, void_type_node, body, call);
2752 TREE_SIDE_EFFECTS (body) = 1;
2753
2754 /* Finally, do the return. */
2755 res_type = void_type_node;
2756 if (res_var != NULL_TREE)
2757 {
2758 tree drt;
2759 gcc_assert (DECL_RESULT (method));
2760 /* Make sure we copy the result variable to the actual
2761 result. We use the type of the DECL_RESULT because it
2762 might be different from the return type of the function:
2763 it might be promoted. */
2764 drt = TREE_TYPE (DECL_RESULT (method));
2765 if (drt != TREE_TYPE (res_var))
2766 res_var = build1 (CONVERT_EXPR, drt, res_var);
2767 res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2768 TREE_SIDE_EFFECTS (res_var) = 1;
2769 }
2770
2771 body = build2 (COMPOUND_EXPR, void_type_node, body,
2772 build1 (RETURN_EXPR, void_type_node, res_var));
2773 TREE_SIDE_EFFECTS (body) = 1;
2774
2775 /* Prepend class initialization for static methods reachable from
2776 other classes. */
2777 if (METHOD_STATIC (method)
2778 && (! METHOD_PRIVATE (method)
2779 || INNER_CLASS_P (DECL_CONTEXT (method))))
2780 {
2781 tree init = build_call_expr (soft_initclass_node, 1,
2782 klass);
2783 body = build2 (COMPOUND_EXPR, void_type_node, init, body);
2784 TREE_SIDE_EFFECTS (body) = 1;
2785 }
2786
2787 bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
2788 body, block);
2789 return bind;
2790 }
2791
2792
2793 /* Given lvalue EXP, return a volatile expression that references the
2794 same object. */
2795
2796 tree
2797 java_modify_addr_for_volatile (tree exp)
2798 {
2799 tree exp_type = TREE_TYPE (exp);
2800 tree v_type
2801 = build_qualified_type (exp_type,
2802 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2803 tree addr = build_fold_addr_expr (exp);
2804 v_type = build_pointer_type (v_type);
2805 addr = fold_convert (v_type, addr);
2806 exp = build_fold_indirect_ref (addr);
2807 return exp;
2808 }
2809
2810
2811 /* Expand an operation to extract from or store into a field.
2812 IS_STATIC is 1 iff the field is static.
2813 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2814 FIELD_REF_INDEX is an index into the constant pool. */
2815
2816 static void
2817 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2818 {
2819 tree self_type
2820 = get_class_constant (current_jcf,
2821 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2822 field_ref_index));
2823 const char *self_name
2824 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2825 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2826 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2827 field_ref_index);
2828 tree field_type = get_type_from_signature (field_signature);
2829 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2830 tree field_ref;
2831 int is_error = 0;
2832 tree original_self_type = self_type;
2833 tree field_decl;
2834 tree modify_expr;
2835
2836 if (! CLASS_LOADED_P (self_type))
2837 load_class (self_type, 1);
2838 field_decl = lookup_field (&self_type, field_name);
2839 if (field_decl == error_mark_node)
2840 {
2841 is_error = 1;
2842 }
2843 else if (field_decl == NULL_TREE)
2844 {
2845 if (! flag_verify_invocations)
2846 {
2847 int flags = ACC_PUBLIC;
2848 if (is_static)
2849 flags |= ACC_STATIC;
2850 self_type = original_self_type;
2851 field_decl = add_field (original_self_type, field_name,
2852 field_type, flags);
2853 DECL_ARTIFICIAL (field_decl) = 1;
2854 DECL_IGNORED_P (field_decl) = 1;
2855 #if 0
2856 /* FIXME: We should be pessimistic about volatility. We
2857 don't know one way or another, but this is safe.
2858 However, doing this has bad effects on code quality. We
2859 need to look at better ways to do this. */
2860 TREE_THIS_VOLATILE (field_decl) = 1;
2861 #endif
2862 }
2863 else
2864 {
2865 error ("missing field '%s' in '%s'",
2866 IDENTIFIER_POINTER (field_name), self_name);
2867 is_error = 1;
2868 }
2869 }
2870 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2871 {
2872 error ("mismatching signature for field '%s' in '%s'",
2873 IDENTIFIER_POINTER (field_name), self_name);
2874 is_error = 1;
2875 }
2876 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2877 if (is_error)
2878 {
2879 if (! is_putting)
2880 push_value (convert (field_type, integer_zero_node));
2881 flush_quick_stack ();
2882 return;
2883 }
2884
2885 field_ref = build_field_ref (field_ref, self_type, field_name);
2886 if (is_static
2887 && ! flag_indirect_dispatch)
2888 {
2889 tree context = DECL_CONTEXT (field_ref);
2890 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2891 field_ref = build_class_init (context, field_ref);
2892 else
2893 field_ref = build_class_init (self_type, field_ref);
2894 }
2895 if (is_putting)
2896 {
2897 flush_quick_stack ();
2898 if (FIELD_FINAL (field_decl))
2899 {
2900 if (DECL_CONTEXT (field_decl) != current_class)
2901 error ("assignment to final field %q+D not in field's class",
2902 field_decl);
2903 /* We used to check for assignments to final fields not
2904 occurring in the class initializer or in a constructor
2905 here. However, this constraint doesn't seem to be
2906 enforced by the JVM. */
2907 }
2908
2909 if (TREE_THIS_VOLATILE (field_decl))
2910 field_ref = java_modify_addr_for_volatile (field_ref);
2911
2912 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2913 field_ref, new_value);
2914
2915 if (TREE_THIS_VOLATILE (field_decl))
2916 java_add_stmt
2917 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2918
2919 java_add_stmt (modify_expr);
2920 }
2921 else
2922 {
2923 tree temp = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2924 java_add_local_var (temp);
2925
2926 if (TREE_THIS_VOLATILE (field_decl))
2927 field_ref = java_modify_addr_for_volatile (field_ref);
2928
2929 modify_expr
2930 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2931 java_add_stmt (modify_expr);
2932
2933 if (TREE_THIS_VOLATILE (field_decl))
2934 java_add_stmt
2935 (build_call_expr (built_in_decls[BUILT_IN_SYNCHRONIZE], 0));
2936
2937 push_value (temp);
2938 }
2939 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2940 }
2941
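/* Restore type_map[] and the stack pointer from the type state that
   was recorded for the basic block starting at bytecode offset PC.  */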
2942 static void
2943 load_type_state (int pc)
2944 {
2945 int i;
2946 tree vec = VEC_index (tree, type_states, pc);
2947 int cur_length = TREE_VEC_LENGTH (vec);
2948 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2949 for (i = 0; i < cur_length; i++)
2950 type_map [i] = TREE_VEC_ELT (vec, i);
2951 }
2952
2953 /* Go over METHOD's bytecode and note instruction starts and jump
2954 targets in instruction_bits[], allocating it and type_states.  */
2955
2956 void
2957 note_instructions (JCF *jcf, tree method)
2958 {
2959 int PC;
2960 unsigned char* byte_ops;
2961 long length = DECL_CODE_LENGTH (method);
2962
2963 int saw_index;
2964 jint INT_temp;
2965
2966 #undef RET /* Defined by config/i386/i386.h */
2967 #undef PTR
2968 #define BCODE byte_ops
2969 #define BYTE_type_node byte_type_node
2970 #define SHORT_type_node short_type_node
2971 #define INT_type_node int_type_node
2972 #define LONG_type_node long_type_node
2973 #define CHAR_type_node char_type_node
2974 #define PTR_type_node ptr_type_node
2975 #define FLOAT_type_node float_type_node
2976 #define DOUBLE_type_node double_type_node
2977 #define VOID_type_node void_type_node
2978 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2979 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2980 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2981 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2982
2983 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
2984
2985 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
2986 byte_ops = jcf->read_ptr;
2987 instruction_bits = xrealloc (instruction_bits, length + 1);
2988 memset (instruction_bits, 0, length + 1);
2989 type_states = VEC_alloc (tree, gc, length + 1);
2990 VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
2991
2992 /* This pass figures out which PCs can be the targets of jumps.  */
2993 for (PC = 0; PC < length;)
2994 {
2995 int oldpc = PC; /* PC at instruction start. */
2996 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
2997 switch (byte_ops[PC++])
2998 {
2999 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3000 case OPCODE: \
3001 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3002 break;
3003
3004 #define NOTE_LABEL(PC) note_label(oldpc, PC)
3005
3006 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3007 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3008 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
3009 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3010 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3011 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3012 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3013 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3014
3015 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3016 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3017 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
3018 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
3019 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
3020 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
3021 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
3022 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
3023
3024 /* two forms of wide instructions */
3025 #define PRE_SPECIAL_WIDE(IGNORE) \
3026 { \
3027 int modified_opcode = IMMEDIATE_u1; \
3028 if (modified_opcode == OPCODE_iinc) \
3029 { \
3030 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3031 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
3032 } \
3033 else \
3034 { \
3035 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
3036 } \
3037 }
3038
3039 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
3040
3041 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3042
3043 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
3044 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
3045 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
3046 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
3047 #define PRE_ARRAY_STORE(TYPE) /* nothing */
3048 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
3049 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
3050 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
3051 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
3052 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
3053
3054 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3055 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
3056 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3057 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3058 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3059 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
3060 saw_index = 0; INT_temp = (OPERAND_VALUE); \
3061 NOTE_LABEL (PC); \
3062 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
3063
3064 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
3065
3066 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3067 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
3068
3069 #define PRE_LOOKUP_SWITCH \
3070 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3071 NOTE_LABEL (default_offset+oldpc); \
3072 if (npairs >= 0) \
3073 while (--npairs >= 0) { \
3074 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
3075 jint offset = IMMEDIATE_s4; \
3076 NOTE_LABEL (offset+oldpc); } \
3077 }
3078
3079 #define PRE_TABLE_SWITCH \
3080 { jint default_offset = IMMEDIATE_s4; \
3081 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3082 NOTE_LABEL (default_offset+oldpc); \
3083 if (low <= high) \
3084 while (low++ <= high) { \
3085 jint offset = IMMEDIATE_s4; \
3086 NOTE_LABEL (offset+oldpc); } \
3087 }
3088
3089 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3090 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
3091 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3092 (void)(IMMEDIATE_u2); \
3093 PC += 2 * IS_INTERFACE /* for invokeinterface */;
3094
3095 #include "javaop.def"
3096 #undef JAVAOP
3097 }
3098 } /* for */
3099 }
3100
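/* Translate the bytecode of METHOD into trees: verify it, then walk
   the instructions emitting labels, line number information and the
   expansion of each opcode, replacing unreachable bytecodes by nops.  */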
3101 void
3102 expand_byte_code (JCF *jcf, tree method)
3103 {
3104 int PC;
3105 int i;
3106 const unsigned char *linenumber_pointer;
3107 int dead_code_index = -1;
3108 unsigned char* byte_ops;
3109 long length = DECL_CODE_LENGTH (method);
3110
3111 stack_pointer = 0;
3112 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
3113 byte_ops = jcf->read_ptr;
3114
3115 /* We make an initial pass of the line number table, to note
3116 which instructions have associated line number entries. */
3117 linenumber_pointer = linenumber_table;
3118 for (i = 0; i < linenumber_count; i++)
3119 {
3120 int pc = GET_u2 (linenumber_pointer);
3121 linenumber_pointer += 4;
3122 if (pc >= length)
3123 warning (0, "invalid PC in line number table");
3124 else
3125 {
3126 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
3127 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
3128 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
3129 }
3130 }
3131
3132 if (! verify_jvm_instructions_new (jcf, byte_ops, length))
3133 return;
3134
3135 promote_arguments ();
3136 cache_this_class_ref (method);
3137 cache_cpool_data_ref ();
3138
3139 /* Translate bytecodes. */
3140 linenumber_pointer = linenumber_table;
3141 for (PC = 0; PC < length;)
3142 {
3143 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
3144 {
3145 tree label = lookup_label (PC);
3146 flush_quick_stack ();
3147 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
3148 java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
3149 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3150 load_type_state (PC);
3151 }
3152
3153 if (! (instruction_bits [PC] & BCODE_VERIFIED))
3154 {
3155 if (dead_code_index == -1)
3156 {
3157 /* This is the start of a region of unreachable bytecodes.
3158 They still need to be processed in order for EH ranges
3159 to get handled correctly. However, we can simply
3160 replace these bytecodes with nops. */
3161 dead_code_index = PC;
3162 }
3163
3164 /* Turn this bytecode into a nop. */
3165 byte_ops[PC] = 0x0;
3166 }
3167 else
3168 {
3169 if (dead_code_index != -1)
3170 {
3171 /* We've just reached the end of a region of dead code. */
3172 if (extra_warnings)
3173 warning (0, "unreachable bytecode from %d to before %d",
3174 dead_code_index, PC);
3175 dead_code_index = -1;
3176 }
3177 }
3178
3179 /* Handle possible line number entry for this PC.
3180
3181 This code handles out-of-order and multiple linenumbers per PC,
3182 but is optimized for the case of line numbers increasing
3183 monotonically with PC. */
3184 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
3185 {
3186 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
3187 || GET_u2 (linenumber_pointer) != PC)
3188 linenumber_pointer = linenumber_table;
3189 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
3190 {
3191 int pc = GET_u2 (linenumber_pointer);
3192 linenumber_pointer += 4;
3193 if (pc == PC)
3194 {
3195 int line = GET_u2 (linenumber_pointer - 2);
3196 #ifdef USE_MAPPED_LOCATION
3197 input_location = linemap_line_start (line_table, line, 1);
3198 #else
3199 input_location.line = line;
3200 #endif
3201 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
3202 break;
3203 }
3204 }
3205 }
3206 maybe_pushlevels (PC);
3207 PC = process_jvm_instruction (PC, byte_ops, length);
3208 maybe_poplevels (PC);
3209 } /* for */
3210
3211 uncache_this_class_ref (method);
3212
3213 if (dead_code_index != -1)
3214 {
3215 /* We've just reached the end of a region of dead code. */
3216 if (extra_warnings)
3217 warning (0, "unreachable bytecode from %d to the end of the method",
3218 dead_code_index);
3219 }
3220 }
3221
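/* Push the value of constant pool entry INDEX onto the quick stack.
   String and Class constants are turned into references; other
   constants are pushed directly.  */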
3222 static void
3223 java_push_constant_from_pool (JCF *jcf, int index)
3224 {
3225 tree c;
3226 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3227 {
3228 tree name;
3229 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3230 index = alloc_name_constant (CONSTANT_String, name);
3231 c = build_ref_from_constant_pool (index);
3232 c = convert (promote_type (string_type_node), c);
3233 }
3234 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3235 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3236 {
3237 tree record = get_class_constant (jcf, index);
3238 c = build_class_ref (record);
3239 }
3240 else
3241 c = get_constant (jcf, index);
3242 push_value (c);
3243 }
3244
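/* Expand the single bytecode instruction at PC in BYTE_OPS and return
   the PC of the following instruction.  */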
3245 int
3246 process_jvm_instruction (int PC, const unsigned char* byte_ops,
3247 long length ATTRIBUTE_UNUSED)
3248 {
3249 const char *opname; /* Temporary ??? */
3250 int oldpc = PC; /* PC at instruction start. */
3251
3252 /* If the instruction is at the beginning of an exception handler,
3253 replace the top of the stack with the thrown object reference. */
3254 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
3255 {
3256 /* Note that the verifier will not emit a type map at all for
3257 dead exception handlers. In this case we just ignore the
3258 situation. */
3259 if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
3260 {
3261 tree type = pop_type (promote_type (throwable_type_node));
3262 push_value (build_exception_object_ref (type));
3263 }
3264 }
3265
3266 switch (byte_ops[PC++])
3267 {
3268 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
3269 case OPCODE: \
3270 opname = #OPNAME; \
3271 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
3272 break;
3273
3274 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
3275 { \
3276 int saw_index = 0; \
3277 int index = OPERAND_VALUE; \
3278 build_java_ret \
3279 (find_local_variable (index, return_address_type_node, oldpc)); \
3280 }
3281
3282 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
3283 { \
3284 /* OPERAND_VALUE may have side-effects on PC */ \
3285 int opvalue = OPERAND_VALUE; \
3286 build_java_jsr (oldpc + opvalue, PC); \
3287 }
3288
3289 /* Push a constant onto the stack. */
3290 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
3291 { int saw_index = 0; int ival = (OPERAND_VALUE); \
3292 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
3293 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
3294
3295 /* internal macro added for use by the WIDE case */
3296 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
3297 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
3298
3299 /* Push a local variable onto the operand stack.  */
3300 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
3301 { \
3302 /* have to do this since OPERAND_VALUE may have side-effects */ \
3303 int opvalue = OPERAND_VALUE; \
3304 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3305 }
3306
3307 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
3308 expand_java_return (OPERAND_TYPE##_type_node)
3309
3310 #define REM_EXPR TRUNC_MOD_EXPR
3311 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
3312 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
3313
3314 #define FIELD(IS_STATIC, IS_PUT) \
3315 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
3316
3317 #define TEST(OPERAND_TYPE, CONDITION) \
3318 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3319
3320 #define COND(OPERAND_TYPE, CONDITION) \
3321 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
3322
3323 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
3324 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
3325
3326 #define BRANCH_GOTO(OPERAND_VALUE) \
3327 expand_java_goto (oldpc + OPERAND_VALUE)
3328
3329 #define BRANCH_CALL(OPERAND_VALUE) \
3330 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
3331
3332 #if 0
3333 #define BRANCH_RETURN(OPERAND_VALUE) \
3334 { \
3335 tree type = OPERAND_TYPE##_type_node; \
3336 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
3337 expand_java_ret (value); \
3338 }
3339 #endif
3340
3341 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3342 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3343 fprintf (stderr, "(not implemented)\n")
3344 #define NOT_IMPL1(OPERAND_VALUE) \
3345 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3346 fprintf (stderr, "(not implemented)\n")
3347
3348 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3349
3350 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3351
3352 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3353
3354 #define STACK_SWAP(COUNT) java_stack_swap()
3355
3356 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3357 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3358 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3359
3360 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3361 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3362
3363 #define LOOKUP_SWITCH \
3364 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3365 tree selector = pop_value (INT_type_node); \
3366 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3367 while (--npairs >= 0) \
3368 { \
3369 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3370 expand_java_add_case (switch_expr, match, oldpc + offset); \
3371 } \
3372 }
3373
3374 #define TABLE_SWITCH \
3375 { jint default_offset = IMMEDIATE_s4; \
3376 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3377 tree selector = pop_value (INT_type_node); \
3378 tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
3379 for (; low <= high; low++) \
3380 { \
3381 jint offset = IMMEDIATE_s4; \
3382 expand_java_add_case (switch_expr, low, oldpc + offset); \
3383 } \
3384 }
3385
3386 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3387 { int opcode = byte_ops[PC-1]; \
3388 int method_ref_index = IMMEDIATE_u2; \
3389 int nargs; \
3390 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3391 else nargs = -1; \
3392 expand_invoke (opcode, method_ref_index, nargs); \
3393 }
3394
3395 /* Handle new, checkcast, instanceof */
3396 #define OBJECT(TYPE, OP) \
3397 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3398
3399 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3400
3401 #define ARRAY_LOAD(OPERAND_TYPE) \
3402 { \
3403 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3404 }
3405
3406 #define ARRAY_STORE(OPERAND_TYPE) \
3407 { \
3408 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3409 }
3410
3411 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3412 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3413 #define ARRAY_NEW_PTR() \
3414 push_value (build_anewarray (get_class_constant (current_jcf, \
3415 IMMEDIATE_u2), \
3416 pop_value (int_type_node)));
3417 #define ARRAY_NEW_NUM() \
3418 { \
3419 int atype = IMMEDIATE_u1; \
3420 push_value (build_newarray (atype, pop_value (int_type_node)));\
3421 }
3422 #define ARRAY_NEW_MULTI() \
3423 { \
3424 tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3425 int ndims = IMMEDIATE_u1; \
3426 expand_java_multianewarray( class, ndims ); \
3427 }
3428
3429 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3430 push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3431 pop_value (OPERAND_TYPE##_type_node)));
3432
3433 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3434 { \
3435 push_value (build1 (NOP_EXPR, int_type_node, \
3436 (convert (TO_TYPE##_type_node, \
3437 pop_value (FROM_TYPE##_type_node))))); \
3438 }
3439
3440 #define CONVERT(FROM_TYPE, TO_TYPE) \
3441 { \
3442 push_value (convert (TO_TYPE##_type_node, \
3443 pop_value (FROM_TYPE##_type_node))); \
3444 }
3445
3446 /* internal macro added for use by the WIDE case
3447 Added TREE_TYPE (decl) assignment, apbianco */
3448 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3449 { \
3450 tree decl, value; \
3451 int index = OPVALUE; \
3452 tree type = OPTYPE; \
3453 value = pop_value (type); \
3454 type = TREE_TYPE (value); \
3455 decl = find_local_variable (index, type, oldpc); \
3456 set_local_type (index, type); \
3457 java_add_stmt (build2 (MODIFY_EXPR, type, decl, value)); \
3458 }
3459
3460 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3461 { \
3462 /* have to do this since OPERAND_VALUE may have side-effects */ \
3463 int opvalue = OPERAND_VALUE; \
3464 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3465 }
3466
3467 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3468 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3469
3470 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3471 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3472
3473 #define MONITOR_OPERATION(call) \
3474 { \
3475 tree o = pop_value (ptr_type_node); \
3476 tree c; \
3477 flush_quick_stack (); \
3478 c = build_java_monitor (call, o); \
3479 TREE_SIDE_EFFECTS (c) = 1; \
3480 java_add_stmt (c); \
3481 }
3482
3483 #define SPECIAL_IINC(IGNORED) \
3484 { \
3485 unsigned int local_var_index = IMMEDIATE_u1; \
3486 int ival = IMMEDIATE_s1; \
3487 expand_iinc(local_var_index, ival, oldpc); \
3488 }
3489
3490 #define SPECIAL_WIDE(IGNORED) \
3491 { \
3492 int modified_opcode = IMMEDIATE_u1; \
3493 unsigned int local_var_index = IMMEDIATE_u2; \
3494 switch (modified_opcode) \
3495 { \
3496 case OPCODE_iinc: \
3497 { \
3498 int ival = IMMEDIATE_s2; \
3499 expand_iinc (local_var_index, ival, oldpc); \
3500 break; \
3501 } \
3502 case OPCODE_iload: \
3503 case OPCODE_lload: \
3504 case OPCODE_fload: \
3505 case OPCODE_dload: \
3506 case OPCODE_aload: \
3507 { \
3508 /* duplicate code from LOAD macro */ \
3509 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3510 break; \
3511 } \
3512 case OPCODE_istore: \
3513 case OPCODE_lstore: \
3514 case OPCODE_fstore: \
3515 case OPCODE_dstore: \
3516 case OPCODE_astore: \
3517 { \
3518 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3519 break; \
3520 } \
3521 default: \
3522 error ("unrecognized wide sub-instruction"); \
3523 } \
3524 }
3525
3526 #define SPECIAL_THROW(IGNORED) \
3527 build_java_athrow (pop_value (throwable_type_node))
3528
3529 #define SPECIAL_BREAK NOT_IMPL1
3530 #define IMPL NOT_IMPL
3531
3532 #include "javaop.def"
3533 #undef JAVAOP
3534 default:
3535 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3536 }
3537 return PC;
3538 }
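
/* A minimal, self-contained sketch of the decoding that the SPECIAL_WIDE
   macro above performs through IMMEDIATE_u1/IMMEDIATE_u2/IMMEDIATE_s2:
   the `wide' prefix widens the local-variable index of the modified
   opcode to an unsigned big-endian 16-bit value and, for iinc, also
   widens the increment to a signed 16-bit value.  It works on a plain
   byte array rather than the JCF reader, and the sketch_ names are
   invented for this illustration only.  */

static void ATTRIBUTE_UNUSED
sketch_decode_wide (const unsigned char *code, int pc,
                    int *local_var_index, int *iinc_value)
{
  int modified_opcode = code[pc + 1];

  /* Two unsigned operand bytes, high byte first.  */
  *local_var_index = (code[pc + 2] << 8) | code[pc + 3];

  if (modified_opcode == OPCODE_iinc)
    {
      /* The increment is a signed 16-bit immediate; sign-extend it.  */
      int uval = (code[pc + 4] << 8) | code[pc + 5];
      *iinc_value = (uval ^ 0x8000) - 0x8000;
    }
  else
    *iinc_value = 0;
}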
3539
3540 /* Return the opcode at PC in the code section pointed to by
3541 CODE_OFFSET. */
3542
3543 static unsigned char
3544 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3545 {
3546 unsigned char opcode;
3547 long absolute_offset = (long)JCF_TELL (jcf);
3548
3549 JCF_SEEK (jcf, code_offset);
3550 opcode = jcf->read_ptr [pc];
3551 JCF_SEEK (jcf, absolute_offset);
3552 return opcode;
3553 }
3554
3555 /* Some bytecode compilers emit accurate LocalVariableTable
3556 attributes. Here's an example:
3557
3558 PC <t>store_<n>
3559 PC+1 ...
3560
3561 Attribute "LocalVariableTable"
3562 slot #<n>: ... (PC: PC+1 length: L)
3563
3564 This is accurate because the local in slot <n> really exists after
3565 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3566
3567 This procedure recognizes this situation and extends the live range
3568 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3569 length of the store instruction).
3570
3571 This function is used by `give_name_to_locals' so that a local's
3572 DECL features a DECL_LOCAL_START_PC such that the first related
3573 store operation will use DECL as a destination, not an unrelated
3574 temporary created for the occasion.
3575
3576 This function uses a global (instruction_bits) that `note_instructions'
3577 should have allocated and filled properly.  */
3578
3579 int
3580 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3581 int start_pc, int slot)
3582 {
3583 int first, index, opcode;
3584 int pc, insn_pc;
3585 int wide_found = 0;
3586
3587 if (!start_pc)
3588 return start_pc;
3589
3590 first = index = -1;
3591
3592 /* Find the start of the instruction that precedes START_PC and remember it.  */
3593 for (pc = start_pc-1; pc; pc--)
3594 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3595 break;
3596 insn_pc = pc;
3597
3598 /* Retrieve the instruction, handle `wide'. */
3599 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3600 if (opcode == OPCODE_wide)
3601 {
3602 wide_found = 1;
3603 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3604 }
3605
3606 switch (opcode)
3607 {
3608 case OPCODE_astore_0:
3609 case OPCODE_astore_1:
3610 case OPCODE_astore_2:
3611 case OPCODE_astore_3:
3612 first = OPCODE_astore_0;
3613 break;
3614
3615 case OPCODE_istore_0:
3616 case OPCODE_istore_1:
3617 case OPCODE_istore_2:
3618 case OPCODE_istore_3:
3619 first = OPCODE_istore_0;
3620 break;
3621
3622 case OPCODE_lstore_0:
3623 case OPCODE_lstore_1:
3624 case OPCODE_lstore_2:
3625 case OPCODE_lstore_3:
3626 first = OPCODE_lstore_0;
3627 break;
3628
3629 case OPCODE_fstore_0:
3630 case OPCODE_fstore_1:
3631 case OPCODE_fstore_2:
3632 case OPCODE_fstore_3:
3633 first = OPCODE_fstore_0;
3634 break;
3635
3636 case OPCODE_dstore_0:
3637 case OPCODE_dstore_1:
3638 case OPCODE_dstore_2:
3639 case OPCODE_dstore_3:
3640 first = OPCODE_dstore_0;
3641 break;
3642
3643 case OPCODE_astore:
3644 case OPCODE_istore:
3645 case OPCODE_lstore:
3646 case OPCODE_fstore:
3647 case OPCODE_dstore:
3648 index = peek_opcode_at_pc (jcf, code_offset, pc);
3649 if (wide_found)
3650 {
3651 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3652 index = (other << 8) + index;
3653 }
3654 break;
3655 }
3656
3657 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3658 means we have a <t>store. */
3659 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3660 start_pc = insn_pc;
3661
3662 return start_pc;
3663 }
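
/* A minimal sketch, over a plain byte array instead of the JCF reader, of
   the store-recognition the function above performs: it reports the local
   slot written by the one- or two-byte store form at PC, or -1 when PC
   does not hold such a store.  The wide form, which the real code also
   handles, is left out to keep the sketch short, and the sketch_ name is
   invented for this illustration only.  A LocalVariableTable entry for
   that slot whose range starts just after the store is what
   maybe_adjust_start_pc stretches back to the store itself.  */

static int ATTRIBUTE_UNUSED
sketch_store_slot (const unsigned char *code, int pc)
{
  unsigned char op = code[pc];

  /* <t>store_<n>: the slot number is encoded in the opcode itself.  */
  if (op >= OPCODE_istore_0 && op <= OPCODE_astore_3)
    return (op - OPCODE_istore_0) % 4;

  /* <t>store: the slot number is a one-byte operand.  */
  if (op >= OPCODE_istore && op <= OPCODE_astore)
    return code[pc + 1];

  return -1;
}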
3664
3665 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3666 order, as specified by the Java Language Specification.
3667
3668 The problem is that while expand_expr will evaluate its sub-operands in
3669 left-to-right order, for variables it will just return an rtx (i.e.
3670 an lvalue) for the variable (rather than an rvalue). So it is possible
3671 that a later sub-operand will change the register, and when the
3672 actual operation is done, it will use the new value, when it should
3673 have used the original value.
3674
3675 We fix this by using save_expr. This forces the sub-operand to be
3676 copied into a fresh virtual register.
3677
3678 For method invocation, we modify the arguments so that a
3679 left-to-right evaluation order is enforced. The saved expressions
3680 are then reused, in CALL_EXPR order, when the call is expanded.
3681
3682 We also promote outgoing args if needed. */
3683
3684 tree
3685 force_evaluation_order (tree node)
3686 {
3687 if (flag_syntax_only)
3688 return node;
3689 if (TREE_CODE (node) == CALL_EXPR
3690 || (TREE_CODE (node) == COMPOUND_EXPR
3691 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3692 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3693 {
3694 tree call, cmp;
3695 int i, nargs;
3696
3697 /* Account for constructor calls wrapped in a COMPOUND_EXPR. */
3698 if (TREE_CODE (node) == COMPOUND_EXPR)
3699 call = TREE_OPERAND (node, 0);
3700 else
3701 call = node;
3702
3703 nargs = call_expr_nargs (call);
3704
3705 /* This reverses the evaluation order. This is a desired effect. */
3706 for (i = 0, cmp = NULL_TREE; i < nargs; i++)
3707 {
3708 tree arg = CALL_EXPR_ARG (call, i);
3709 /* Promote types smaller than integer. This is required by
3710 some ABIs. */
3711 tree type = TREE_TYPE (arg);
3712 tree saved;
3713 if (targetm.calls.promote_prototypes (type)
3714 && INTEGRAL_TYPE_P (type)
3715 && INT_CST_LT_UNSIGNED (TYPE_SIZE (type),
3716 TYPE_SIZE (integer_type_node)))
3717 arg = fold_convert (integer_type_node, arg);
3718
3719 saved = save_expr (force_evaluation_order (arg));
3720 cmp = (cmp == NULL_TREE ? saved :
3721 build2 (COMPOUND_EXPR, void_type_node, cmp, saved));
3722
3723 CALL_EXPR_ARG (call, i) = saved;
3724 }
3725
3726 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3727 TREE_SIDE_EFFECTS (cmp) = 1;
3728
3729 if (cmp)
3730 {
3731 cmp = build2 (COMPOUND_EXPR, TREE_TYPE (node), cmp, node);
3732 if (TREE_TYPE (cmp) != void_type_node)
3733 cmp = save_expr (cmp);
3734 TREE_SIDE_EFFECTS (cmp) = 1;
3735 node = cmp;
3736 }
3737 }
3738 return node;
3739 }
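
/* A minimal, self-contained sketch of the effect the SAVE_EXPR chain
   above achieves, expressed at the source level: C leaves the evaluation
   order of call arguments unspecified, so each argument is first
   evaluated into a temporary, left to right, and the call receives only
   the temporaries.  The sketch_ functions are invented for this
   illustration only.  */

static int sketch_eval_count;

static int
sketch_next_value (void)
{
  return ++sketch_eval_count;
}

static int
sketch_apply (int a, int b)
{
  return a - b;
}

static int ATTRIBUTE_UNUSED
sketch_call_left_to_right (void)
{
  /* sketch_apply (sketch_next_value (), sketch_next_value ()) may pass
     (1, 2) or (2, 1) in C; the JLS requires (1, 2).  Evaluating the
     arguments into temporaries first pins the order.  */
  int arg0 = sketch_next_value ();   /* Evaluated first.  */
  int arg1 = sketch_next_value ();   /* Evaluated second.  */
  return sketch_apply (arg0, arg1);  /* 1 - 2 == -1 on the first call.  */
}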
3740
3741 /* Build a node to represent empty statements and blocks. */
3742
3743 tree
3744 build_java_empty_stmt (void)
3745 {
3746 tree t = build_empty_stmt ();
3747 return t;
3748 }
3749
3750 /* Promote all arguments of integral type narrower than int before generating any code. */
3751
3752 static void
3753 promote_arguments (void)
3754 {
3755 int i;
3756 tree arg;
3757 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3758 arg != NULL_TREE; arg = TREE_CHAIN (arg), i++)
3759 {
3760 tree arg_type = TREE_TYPE (arg);
3761 if (INTEGRAL_TYPE_P (arg_type)
3762 && TYPE_PRECISION (arg_type) < 32)
3763 {
3764 tree copy = find_local_variable (i, integer_type_node, -1);
3765 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3766 copy,
3767 fold_convert (integer_type_node, arg)));
3768 }
3769 if (TYPE_IS_WIDE (arg_type))
3770 i++;
3771 }
3772 }
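
/* A minimal source-level sketch of the promotion performed above: an
   integral parameter narrower than int is copied into a 32-bit local
   before any other code is generated, and the translated method body
   then only ever references the widened copy, mirroring the JVM's
   treatment of boolean, byte, short and char locals as int.  The
   sketch_ name is invented for this illustration only.  */

static int ATTRIBUTE_UNUSED
sketch_promoted_parameter (short narrow_arg)
{
  /* Corresponds to the MODIFY_EXPR added by promote_arguments.  */
  int widened = (int) narrow_arg;

  /* The rest of the body works on the widened copy only.  */
  return widened + 1;
}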
3773
3774 /* Create a local variable that points to the constant pool. */
3775
3776 static void
3777 cache_cpool_data_ref (void)
3778 {
3779 if (optimize)
3780 {
3781 tree cpool;
3782 tree d = build_constant_data_ref (flag_indirect_classes);
3783 tree cpool_ptr = build_decl (VAR_DECL, NULL_TREE,
3784 build_pointer_type (TREE_TYPE (d)));
3785 java_add_local_var (cpool_ptr);
3786 TREE_INVARIANT (cpool_ptr) = 1;
3787 TREE_CONSTANT (cpool_ptr) = 1;
3788
3789 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3790 cpool_ptr, build_address_of (d)));
3791 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3792 TREE_THIS_NOTRAP (cpool) = 1;
3793 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3794 }
3795 }
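
/* A minimal source-level sketch of the caching performed above: the
   address of the per-class constant-pool data is computed once into a
   pointer that the optimizers may treat as invariant and non-trapping,
   and every later constant-pool read simply indirects through that
   cached pointer.  The sketch_ structure and names are invented for this
   illustration only.  */

struct sketch_cpool_data
{
  int entries[8];
};

static struct sketch_cpool_data sketch_class_pool;

static int ATTRIBUTE_UNUSED
sketch_read_two_entries (int i, int j)
{
  /* One address computation up front ...  */
  struct sketch_cpool_data *const pool_ptr = &sketch_class_pool;

  /* ... and each later access goes through the cached pointer.  */
  return pool_ptr->entries[i] + pool_ptr->entries[j];
}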
3796
3797 #include "gt-java-expr.h"