[gcc.git] / gcc / java / expr.c
1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA.
21
22 Java and all Java-based marks are trademarks or registered trademarks
23 of Sun Microsystems, Inc. in the United States and other countries.
24 The Free Software Foundation is independent of Sun Microsystems, Inc. */
25
26 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 #include "tree.h"
33 #include "real.h"
34 #include "rtl.h"
35 #include "flags.h"
36 #include "expr.h"
37 #include "java-tree.h"
38 #include "javaop.h"
39 #include "java-opcodes.h"
40 #include "jcf.h"
41 #include "java-except.h"
42 #include "parse.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "ggc.h"
46
47 static void flush_quick_stack (void);
48 static void push_value (tree);
49 static tree pop_value (tree);
50 static void java_stack_swap (void);
51 static void java_stack_dup (int, int);
52 static void build_java_athrow (tree);
53 static void build_java_jsr (int, int);
54 static void build_java_ret (tree);
55 static void expand_java_multianewarray (tree, int);
56 static void expand_java_arraystore (tree);
57 static void expand_java_arrayload (tree);
58 static void expand_java_array_length (void);
59 static tree build_java_monitor (tree, tree);
60 static void expand_java_pushc (int, tree);
61 static void expand_java_return (tree);
62 static void expand_load_internal (int, tree, int);
63 static void expand_java_NEW (tree);
64 static void expand_java_INSTANCEOF (tree);
65 static void expand_java_CHECKCAST (tree);
66 static void expand_iinc (unsigned int, int, int);
67 static void expand_java_binop (tree, enum tree_code);
68 static void note_label (int, int);
69 static void expand_compare (enum tree_code, tree, tree, int);
70 static void expand_test (enum tree_code, tree, int);
71 static void expand_cond (enum tree_code, tree, int);
72 static void expand_java_goto (int);
73 #if 0
74 static void expand_java_call (int, int);
75 static void expand_java_ret (tree);
76 #endif
77 static tree pop_arguments (tree);
78 static void expand_invoke (int, int, int);
79 static void expand_java_field_op (int, int, int);
80 static void java_push_constant_from_pool (struct JCF *, int);
81 static void java_stack_pop (int);
82 static tree build_java_throw_out_of_bounds_exception (tree);
83 static tree build_java_check_indexed_type (tree, tree);
84 static tree case_identity (tree, tree);
85 static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
86 static int emit_init_test_initialization (void **entry, void * ptr);
87
88 static GTY(()) tree operand_type[59];
89
90 static GTY(()) tree methods_ident;
91 static GTY(()) tree ncode_ident;
92 tree dtable_ident = NULL_TREE;
93
94 /* Set to nonzero value in order to emit class initialization code
95 before static field references. */
96 int always_initialize_class_p;
97
98 /* We store the stack state in two places:
99 Within a basic block, we use the quick_stack, which is a
100 pushdown list (TREE_LISTs) of expression nodes.
101 This is the top part of the stack; below that we use find_stack_slot.
102 At the end of a basic block, the quick_stack must be flushed
103 to the stack slot array (as handled by find_stack_slot).
104 Using quick_stack generates better code (especially when
105 compiled without optimization), because we do not have to
106 explicitly store and load trees to temporary variables.
107
108 If a variable is on the quick stack, it refers to the value the variable
109 had when the quick stack was last flushed. Conceptually, flush_quick_stack
110 saves all the quick_stack elements in parallel. However, that is
111 complicated, so it actually saves them (i.e. copies each stack value
112 to its home virtual register) starting from the low indexes. This allows a
113 quick_stack element at index i (counting from the bottom of the stack) to
114 reference slot virtual registers whose indexes are >= i, but not deeper ones.
115 This convention makes most operations easier. For example, iadd works
116 even when the stack contains (reg[0], reg[1]): It results in the
117 stack containing (reg[0]+reg[1]), which is OK. However, some stack
118 operations are more complicated. For example, dup given a stack
119 containing (reg[0]) would yield (reg[0], reg[0]), which would violate
120 the convention, since stack value 1 would refer to a register with a
121 lower index (reg[0]), which flush_quick_stack does not safely handle.
122 So dup cannot just add an extra element to the quick_stack, but iadd can.
123 */
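/* Illustrative sketch (added for clarity, not part of the original source):
   with stack slots named reg[i], a fully flushed two-element stack is
   (reg[0], reg[1]).  Under the convention above:

     iadd  =>  quick_stack holds (reg[0] + reg[1])      -- allowed, the
               expression only references slots >= its own index 0.
     dup   =>  would need quick_stack (reg[0], reg[0])  -- not allowed, the
               element at index 1 references the deeper slot reg[0]; so
               java_stack_dup below flushes first and copies slots with
               emit_move_insn instead of growing the quick_stack.  */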
124
125 static GTY(()) tree quick_stack;
126
127 /* A free-list of unused permanent TREE_LIST nodes. */
128 static GTY((deletable (""))) tree tree_list_free_list;
129
130 /* The stack pointer of the Java virtual machine.
131 This does include the size of the quick_stack. */
132
133 int stack_pointer;
134
135 const unsigned char *linenumber_table;
136 int linenumber_count;
137
138 void
139 init_expr_processing (void)
140 {
141 operand_type[21] = operand_type[54] = int_type_node;
142 operand_type[22] = operand_type[55] = long_type_node;
143 operand_type[23] = operand_type[56] = float_type_node;
144 operand_type[24] = operand_type[57] = double_type_node;
145 operand_type[25] = operand_type[58] = ptr_type_node;
146 }
147
148 tree
149 java_truthvalue_conversion (tree expr)
150 {
151 /* It is simpler and generates better code to have only TRUTH_*_EXPR
152 or comparison expressions as truth values at this level.
153
154 This function should normally be identity for Java. */
155
156 switch (TREE_CODE (expr))
157 {
158 case EQ_EXPR:
159 case NE_EXPR: case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
160 case TRUTH_ANDIF_EXPR:
161 case TRUTH_ORIF_EXPR:
162 case TRUTH_AND_EXPR:
163 case TRUTH_OR_EXPR:
164 case ERROR_MARK:
165 return expr;
166
167 case INTEGER_CST:
168 return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
169
170 case REAL_CST:
171 return real_zerop (expr) ? boolean_false_node : boolean_true_node;
172
173 /* are these legal? XXX JH */
174 case NEGATE_EXPR:
175 case ABS_EXPR:
176 case FLOAT_EXPR:
177 /* These don't change whether an object is nonzero or zero. */
178 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
179
180 case COND_EXPR:
181 /* Distribute the conversion into the arms of a COND_EXPR. */
182 return fold (build (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
183 java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
184 java_truthvalue_conversion (TREE_OPERAND (expr, 2))));
185
186 case NOP_EXPR:
187 /* If this is widening the argument, we can ignore it. */
188 if (TYPE_PRECISION (TREE_TYPE (expr))
189 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
190 return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
191 /* fall through to default */
192
193 default:
194 return fold (build (NE_EXPR, boolean_type_node, expr, boolean_false_node));
195 }
196 }
197
198 /* Save any stack slots that happen to be in the quick_stack into their
199 home virtual register slots.
200
201 The copy order is from low stack index to high, to support the invariant
202 that the expression for a slot may contain decls for stack slots with
203 higher (or the same) index, but not lower. */
204
205 static void
206 flush_quick_stack (void)
207 {
208 int stack_index = stack_pointer;
209 tree prev, cur, next;
210
211 /* First reverse the quick_stack, and count the number of slots it has. */
212 for (cur = quick_stack, prev = NULL_TREE; cur != NULL_TREE; cur = next)
213 {
214 next = TREE_CHAIN (cur);
215 TREE_CHAIN (cur) = prev;
216 prev = cur;
217 stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (TREE_VALUE (cur)));
218 }
219 quick_stack = prev;
220
221 while (quick_stack != NULL_TREE)
222 {
223 tree decl;
224 tree node = quick_stack, type;
225 quick_stack = TREE_CHAIN (node);
226 TREE_CHAIN (node) = tree_list_free_list;
227 tree_list_free_list = node;
228 node = TREE_VALUE (node);
229 type = TREE_TYPE (node);
230
231 decl = find_stack_slot (stack_index, type);
232 if (decl != node)
233 expand_assignment (decl, node, 0);
234 stack_index += 1 + TYPE_IS_WIDE (type);
235 }
236 }
237
238 /* Push TYPE on the type stack.
239 Return 1 on success, 0 on overflow. */
240
241 int
242 push_type_0 (tree type)
243 {
244 int n_words;
245 type = promote_type (type);
246 n_words = 1 + TYPE_IS_WIDE (type);
247 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
248 return 0;
249 stack_type_map[stack_pointer++] = type;
250 n_words--;
251 while (--n_words >= 0)
252 stack_type_map[stack_pointer++] = TYPE_SECOND;
253 return 1;
254 }
255
256 void
257 push_type (tree type)
258 {
259 if (! push_type_0 (type))
260 abort ();
261 }
262
263 static void
264 push_value (tree value)
265 {
266 tree type = TREE_TYPE (value);
267 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
268 {
269 type = promote_type (type);
270 value = convert (type, value);
271 }
272 push_type (type);
273 if (tree_list_free_list == NULL_TREE)
274 quick_stack = tree_cons (NULL_TREE, value, quick_stack);
275 else
276 {
277 tree node = tree_list_free_list;
278 tree_list_free_list = TREE_CHAIN (tree_list_free_list);
279 TREE_VALUE (node) = value;
280 TREE_CHAIN (node) = quick_stack;
281 quick_stack = node;
282 }
283 }
284
285 /* Pop a type from the type stack.
286 TYPE is the expected type. Return the actual type, which must be
287 convertible to TYPE.
288 On an error, *MESSAGEP is set to a freshly malloc'd error message. */
289
290 tree
291 pop_type_0 (tree type, char **messagep)
292 {
293 int n_words;
294 tree t;
295 *messagep = NULL;
296 if (TREE_CODE (type) == RECORD_TYPE)
297 type = promote_type (type);
298 n_words = 1 + TYPE_IS_WIDE (type);
299 if (stack_pointer < n_words)
300 {
301 *messagep = xstrdup ("stack underflow");
302 return type;
303 }
304 while (--n_words > 0)
305 {
306 if (stack_type_map[--stack_pointer] != void_type_node)
307 {
308 *messagep = xstrdup ("Invalid multi-word value on type stack");
309 return type;
310 }
311 }
312 t = stack_type_map[--stack_pointer];
313 if (type == NULL_TREE || t == type)
314 return t;
315 if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
316 && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
317 return t;
318 if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
319 {
320 if (type == ptr_type_node || type == object_ptr_type_node)
321 return t;
322 else if (t == ptr_type_node) /* Special case for null reference. */
323 return type;
324 else if (can_widen_reference_to (t, type))
325 return t;
326 /* This is a kludge, but matches what Sun's verifier does.
327 It can be tricked, but is safe as long as type errors
328 (i.e. interface method calls) are caught at run-time. */
329 else if (CLASS_INTERFACE (TYPE_NAME (TREE_TYPE (type))))
330 return object_ptr_type_node;
331 }
332
333 /* lang_printable_name uses a static buffer, so we must save the result
334 from calling it the first time. */
335 {
336 char *temp = xstrdup (lang_printable_name (type, 0));
337 *messagep = concat ("expected type '", temp,
338 "' but stack contains '", lang_printable_name (t, 0),
339 "'", NULL);
340 free (temp);
341 }
342 return type;
343 }
344
345 /* Pop a type from the type stack.
346 TYPE is the expected type. Return the actual type, which must be
347 convertible to TYPE, otherwise call error. */
348
349 tree
350 pop_type (tree type)
351 {
352 char *message = NULL;
353 type = pop_type_0 (type, &message);
354 if (message != NULL)
355 {
356 error ("%s", message);
357 free (message);
358 }
359 return type;
360 }
361
362 /* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
363 Handles array types and interfaces. */
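/* For illustration (hypothetical Java types, not taken from this file): a
   reference of type String[] may be widened to Object[], to Cloneable or to
   java.io.Serializable, while int[] may not be widened to long[]; for plain
   classes the check walks up TYPE_BINFO_BASETYPE links until the class
   depths match and then compares the two types.  */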
364
365 int
366 can_widen_reference_to (tree source_type, tree target_type)
367 {
368 if (source_type == ptr_type_node || target_type == object_ptr_type_node)
369 return 1;
370
371 /* Get rid of pointers */
372 if (TREE_CODE (source_type) == POINTER_TYPE)
373 source_type = TREE_TYPE (source_type);
374 if (TREE_CODE (target_type) == POINTER_TYPE)
375 target_type = TREE_TYPE (target_type);
376
377 if (source_type == target_type)
378 return 1;
379 else
380 {
381 if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
382 {
383 HOST_WIDE_INT source_length, target_length;
384 if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
385 {
386 /* An array implements Cloneable and Serializable. */
387 tree name = DECL_NAME (TYPE_NAME (target_type));
388 return (name == java_lang_cloneable_identifier_node
389 || name == java_io_serializable_identifier_node);
390 }
391 target_length = java_array_type_length (target_type);
392 if (target_length >= 0)
393 {
394 source_length = java_array_type_length (source_type);
395 if (source_length != target_length)
396 return 0;
397 }
398 source_type = TYPE_ARRAY_ELEMENT (source_type);
399 target_type = TYPE_ARRAY_ELEMENT (target_type);
400 if (source_type == target_type)
401 return 1;
402 if (TREE_CODE (source_type) != POINTER_TYPE
403 || TREE_CODE (target_type) != POINTER_TYPE)
404 return 0;
405 return can_widen_reference_to (source_type, target_type);
406 }
407 else
408 {
409 int source_depth = class_depth (source_type);
410 int target_depth = class_depth (target_type);
411
412 /* class_depth can return a negative depth if an error occurred */
413 if (source_depth < 0 || target_depth < 0)
414 return 0;
415
416 if (CLASS_INTERFACE (TYPE_NAME (target_type)))
417 {
418 /* target_type is OK if source_type or one of source_type's ancestors
419 implements target_type. We handle multiple sub-interfaces. */
420
421 tree basetype_vec = TYPE_BINFO_BASETYPES (source_type);
422 int n = TREE_VEC_LENGTH (basetype_vec), i;
423 for (i=0 ; i < n; i++)
424 if (can_widen_reference_to
425 (TREE_TYPE (TREE_VEC_ELT (basetype_vec, i)),
426 target_type))
427 return 1;
428 if (n == 0)
429 return 0;
430 }
431
432 for ( ; source_depth > target_depth; source_depth--)
433 {
434 source_type = TYPE_BINFO_BASETYPE (source_type, 0);
435 }
436 return source_type == target_type;
437 }
438 }
439 }
440
441 static tree
442 pop_value (tree type)
443 {
444 type = pop_type (type);
445 if (quick_stack)
446 {
447 tree node = quick_stack;
448 quick_stack = TREE_CHAIN (quick_stack);
449 TREE_CHAIN (node) = tree_list_free_list;
450 tree_list_free_list = node;
451 node = TREE_VALUE (node);
452 return node;
453 }
454 else
455 return find_stack_slot (stack_pointer, promote_type (type));
456 }
457
458
459 /* Pop and discard the top COUNT stack slots. */
460
461 static void
462 java_stack_pop (int count)
463 {
464 while (count > 0)
465 {
466 tree type, val;
467
468 if (stack_pointer == 0)
469 abort ();
470
471 type = stack_type_map[stack_pointer - 1];
472 if (type == TYPE_SECOND)
473 {
474 count--;
475 if (stack_pointer == 1 || count <= 0)
476 abort ();
477
478 type = stack_type_map[stack_pointer - 2];
479 }
480 val = pop_value (type);
481 count--;
482 }
483 }
484
485 /* Implement the 'swap' operator (to swap the top two stack slots). */
486
487 static void
488 java_stack_swap (void)
489 {
490 tree type1, type2;
491 rtx temp;
492 tree decl1, decl2;
493
494 if (stack_pointer < 2
495 || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_UNKNOWN
496 || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_UNKNOWN
497 || type1 == TYPE_SECOND || type2 == TYPE_SECOND
498 || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
499 /* Bad stack swap. */
500 abort ();
501
502 flush_quick_stack ();
503 decl1 = find_stack_slot (stack_pointer - 1, type1);
504 decl2 = find_stack_slot (stack_pointer - 2, type2);
505 temp = copy_to_reg (DECL_RTL (decl1));
506 emit_move_insn (DECL_RTL (find_stack_slot (stack_pointer - 1, type2)),
507 DECL_RTL (decl2));
508 emit_move_insn (DECL_RTL (find_stack_slot (stack_pointer - 2, type1)), temp);
509 stack_type_map[stack_pointer - 1] = type2;
510 stack_type_map[stack_pointer - 2] = type1;
511 }
512
513 static void
514 java_stack_dup (int size, int offset)
515 {
516 int low_index = stack_pointer - size - offset;
517 int dst_index;
518 if (low_index < 0)
519 error ("stack underflow - dup* operation");
520
521 flush_quick_stack ();
522
523 stack_pointer += size;
524 dst_index = stack_pointer;
525
526 for (dst_index = stack_pointer; --dst_index >= low_index; )
527 {
528 tree type;
529 int src_index = dst_index - size;
530 if (src_index < low_index)
531 src_index = dst_index + size + offset;
532 type = stack_type_map [src_index];
533 if (type == TYPE_SECOND)
534 {
535 if (src_index <= low_index)
536 /* Dup operation splits 64-bit number. */
537 abort ();
538
539 stack_type_map[dst_index] = type;
540 src_index--; dst_index--;
541 type = stack_type_map[src_index];
542 if (! TYPE_IS_WIDE (type))
543 abort ();
544 }
545 else if (TYPE_IS_WIDE (type))
546 abort ();
547
548 if (src_index != dst_index)
549 {
550 tree src_decl = find_stack_slot (src_index, type);
551 tree dst_decl = find_stack_slot (dst_index, type);
552 emit_move_insn (DECL_RTL (dst_decl), DECL_RTL (src_decl));
553 stack_type_map[dst_index] = type;
554 }
555 }
556 }
557
558 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
559 value stack. */
560
561 static void
562 build_java_athrow (tree node)
563 {
564 tree call;
565
566 call = build (CALL_EXPR,
567 void_type_node,
568 build_address_of (throw_node),
569 build_tree_list (NULL_TREE, node),
570 NULL_TREE);
571 TREE_SIDE_EFFECTS (call) = 1;
572 expand_expr_stmt (call);
573 java_stack_pop (stack_pointer);
574 }
575
576 /* Implementation for jsr/ret */
577
578 static void
579 build_java_jsr (int target_pc, int return_pc)
580 {
581 tree where = lookup_label (target_pc);
582 tree ret = lookup_label (return_pc);
583 tree ret_label = fold (build1 (ADDR_EXPR, return_address_type_node, ret));
584 push_value (ret_label);
585 flush_quick_stack ();
586 emit_jump (label_rtx (where));
587 expand_label (ret);
588 if (instruction_bits [return_pc] & BCODE_VERIFIED)
589 load_type_state (ret);
590 }
591
592 static void
593 build_java_ret (tree location)
594 {
595 expand_computed_goto (location);
596 }
597
598 /* Implementation of operations on array: new, load, store, length */
599
600 tree
601 decode_newarray_type (int atype)
602 {
603 switch (atype)
604 {
605 case 4: return boolean_type_node;
606 case 5: return char_type_node;
607 case 6: return float_type_node;
608 case 7: return double_type_node;
609 case 8: return byte_type_node;
610 case 9: return short_type_node;
611 case 10: return int_type_node;
612 case 11: return long_type_node;
613 default: return NULL_TREE;
614 }
615 }
616
617 /* Map primitive type to the code used by OPCODE_newarray. */
618
619 int
620 encode_newarray_type (tree type)
621 {
622 if (type == boolean_type_node)
623 return 4;
624 else if (type == char_type_node)
625 return 5;
626 else if (type == float_type_node)
627 return 6;
628 else if (type == double_type_node)
629 return 7;
630 else if (type == byte_type_node)
631 return 8;
632 else if (type == short_type_node)
633 return 9;
634 else if (type == int_type_node)
635 return 10;
636 else if (type == long_type_node)
637 return 11;
638 else
639 abort ();
640 }
641
642 /* Build a call to _Jv_ThrowBadArrayIndex(), the
643 ArrayIndexOutOfBoundsException exception handler. */
644
645 static tree
646 build_java_throw_out_of_bounds_exception (tree index)
647 {
648 tree node = build (CALL_EXPR, int_type_node,
649 build_address_of (soft_badarrayindex_node),
650 build_tree_list (NULL_TREE, index), NULL_TREE);
651 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
652 return (node);
653 }
654
655 /* Return the length of an array. Doesn't perform any checking on the nature
656 or value of the array NODE. May be used to implement some bytecodes. */
657
658 tree
659 build_java_array_length_access (tree node)
660 {
661 tree type = TREE_TYPE (node);
662 tree array_type = TREE_TYPE (type);
663 HOST_WIDE_INT length;
664
665 /* JVM spec: If the arrayref is null, the arraylength instruction
666 throws a NullPointerException. The only way we could get a node
667 of type ptr_type_node at this point is `aconst_null; arraylength'
668 or something equivalent. */
669 if (type == ptr_type_node)
670 return build (CALL_EXPR, int_type_node,
671 build_address_of (soft_nullpointer_node),
672 NULL_TREE, NULL_TREE);
673
674 if (!is_array_type_p (type))
675 abort ();
676
677 length = java_array_type_length (type);
678 if (length >= 0)
679 return build_int_2 (length, 0);
680
681 node = build (COMPONENT_REF, int_type_node,
682 build_java_indirect_ref (array_type, node,
683 flag_check_references),
684 lookup_field (&array_type, get_identifier ("length")));
685 IS_ARRAY_LENGTH_ACCESS (node) = 1;
686 return node;
687 }
688
689 /* Optionally check a reference against the NULL pointer. EXPR is the
690 reference to check; CHECK is nonzero if the check should be generated.
691 Don't generate extra checks if we're not generating code. */
692
693 tree
694 java_check_reference (tree expr, int check)
695 {
696 if (!flag_syntax_only && check)
697 {
698 expr = save_expr (expr);
699 expr = build (COND_EXPR, TREE_TYPE (expr),
700 build (EQ_EXPR, boolean_type_node, expr, null_pointer_node),
701 build (CALL_EXPR, void_type_node,
702 build_address_of (soft_nullpointer_node),
703 NULL_TREE, NULL_TREE),
704 expr);
705 }
706
707 return expr;
708 }
709
710 /* Reference an object: just like an INDIRECT_REF, but with checking. */
711
712 tree
713 build_java_indirect_ref (tree type, tree expr, int check)
714 {
715 return build1 (INDIRECT_REF, type, java_check_reference (expr, check));
716 }
717
718 /* Implement array indexing (either as l-value or r-value).
719 Returns a tree for ARRAY[INDEX], assuming TYPE is the element type.
720 Optionally performs bounds checking and/or a NULL check.
721 At this point, ARRAY should have been verified as an array. */
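/* Rough sketch (added for clarity; assumes both flag_bounds_check and
   flag_check_references are set): the tree built below for ARRAY[INDEX] has
   the shape

     (*(array == NULL ? <throw NullPointerException> : array)).data
        [((unsigned) index >= (unsigned) length && <throw AIOOBE>, index)]

   i.e. the NULL check is folded into the indirection and the bounds check is
   evaluated first via the COMPOUND_EXPR wrapping the index.  */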
722
723 tree
724 build_java_arrayaccess (tree array, tree type, tree index)
725 {
726 tree node, throw = NULL_TREE;
727 tree data_field;
728 tree ref;
729 tree array_type = TREE_TYPE (TREE_TYPE (array));
730
731 if (flag_bounds_check)
732 {
733 /* Generate:
734 * (unsigned jint) INDEX >= (unsigned jint) LEN
735 * && throw ArrayIndexOutOfBoundsException.
736 * Note this is equivalent to and more efficient than:
737 * INDEX < 0 || INDEX >= LEN && throw ... */
738 tree test;
739 tree len = build_java_array_length_access (array);
740 TREE_TYPE (len) = unsigned_int_type_node;
741 test = fold (build (GE_EXPR, boolean_type_node,
742 convert (unsigned_int_type_node, index),
743 len));
744 if (! integer_zerop (test))
745 {
746 throw = build (TRUTH_ANDIF_EXPR, int_type_node, test,
747 build_java_throw_out_of_bounds_exception (index));
748 /* allows expansion within COMPOUND */
749 TREE_SIDE_EFFECTS( throw ) = 1;
750 }
751 }
752
753 /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
754 to have the bounds check evaluated first. */
755 if (throw != NULL_TREE)
756 index = build (COMPOUND_EXPR, int_type_node, throw, index);
757
758 data_field = lookup_field (&array_type, get_identifier ("data"));
759
760 ref = build (COMPONENT_REF, TREE_TYPE (data_field),
761 build_java_indirect_ref (array_type, array,
762 flag_check_references),
763 data_field);
764
765 node = build (ARRAY_REF, type, ref, index);
766 return node;
767 }
768
769 /* Generate code to throw an ArrayStoreException if OBJECT is not assignable
770 (at runtime) to an element of ARRAY. A NOP_EXPR is returned if it can be
771 determined that no check is required. */
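/* Illustrative example (hypothetical Java source, not from this file): for

     Object[] a = ...;  a[i] = o;

   the store is preceded by a call

     _Jv_CheckArrayStore (a, o);

   No call is emitted when store checks are disabled (flag_store_check is
   clear), when the declared element type is final or is itself an array
   type, or when O was just loaded from A itself; a NOP_EXPR is returned
   instead in those cases.  */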
772
773 tree
774 build_java_arraystore_check (tree array, tree object)
775 {
776 tree check, element_type, source;
777 tree array_type_p = TREE_TYPE (array);
778 tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));
779
780 if (! is_array_type_p (array_type_p))
781 abort ();
782
783 /* Get the TYPE_DECL for ARRAY's element type. */
784 element_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
785
786 if (TREE_CODE (element_type) != TYPE_DECL
787 || TREE_CODE (object_type) != TYPE_DECL)
788 abort ();
789
790 if (!flag_store_check)
791 return build1 (NOP_EXPR, array_type_p, array);
792
793 /* No check is needed if the element type is final or is itself an array.
794 Also check that element_type matches object_type, since in the bytecode
795 compilation case element_type may be the actual element type of the array
796 rather than its declared type. */
797 if (element_type == object_type
798 && (TYPE_ARRAY_P (TREE_TYPE (element_type))
799 || CLASS_FINAL (element_type)))
800 return build1 (NOP_EXPR, array_type_p, array);
801
802 /* OBJECT might be wrapped by a SAVE_EXPR. */
803 if (TREE_CODE (object) == SAVE_EXPR)
804 source = TREE_OPERAND (object, 0);
805 else
806 source = object;
807
808 /* Avoid the check if OBJECT was just loaded from the same array. */
809 if (TREE_CODE (source) == ARRAY_REF)
810 {
811 tree target;
812 source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
813 source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
814 source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
815 if (TREE_CODE (source) == SAVE_EXPR)
816 source = TREE_OPERAND (source, 0);
817
818 target = array;
819 if (TREE_CODE (target) == SAVE_EXPR)
820 target = TREE_OPERAND (target, 0);
821
822 if (source == target)
823 return build1 (NOP_EXPR, array_type_p, array);
824 }
825
826 /* Build an invocation of _Jv_CheckArrayStore */
827 check = build (CALL_EXPR, void_type_node,
828 build_address_of (soft_checkarraystore_node),
829 tree_cons (NULL_TREE, array,
830 build_tree_list (NULL_TREE, object)),
831 NULL_TREE);
832 TREE_SIDE_EFFECTS (check) = 1;
833
834 return check;
835 }
836
837 /* Makes sure that INDEXED_TYPE is appropriate. If not, make it from
838 ARRAY_NODE. This function is used to retrieve something less vague than
839 a pointer type when indexing the first dimension of something like [[<t>.
840 May return a corrected type, if necessary, otherwise INDEXED_TYPE is
841 returned unchanged.
842 As a side effect, it also makes sure that ARRAY_NODE is an array. */
843
844 static tree
845 build_java_check_indexed_type (tree array_node, tree indexed_type)
846 {
847 tree elt_type;
848
849 if (!is_array_type_p (TREE_TYPE (array_node)))
850 abort ();
851
852 elt_type = (TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (array_node))));
853
854 if (indexed_type == ptr_type_node )
855 return promote_type (elt_type);
856
857 /* BYTE/BOOLEAN store and load are used for both types. */
858 if (indexed_type == byte_type_node && elt_type == boolean_type_node )
859 return boolean_type_node;
860
861 if (indexed_type != elt_type )
862 abort ();
863 else
864 return indexed_type;
865 }
866
867 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
868 called with an integer code (the type of array to create), and the length
869 of the array to create. */
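/* Illustrative example (hypothetical source, not from this file): for
   'new int[n]' the call built below is roughly

     _Jv_NewPrimArray (<class ref for int>, n)

   when compiling to native code; when emitting class files the first
   argument is instead the newarray type code, 10 for int.  */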
870
871 tree
872 build_newarray (int atype_value, tree length)
873 {
874 tree type_arg;
875
876 tree prim_type = decode_newarray_type (atype_value);
877 tree type
878 = build_java_array_type (prim_type,
879 host_integerp (length, 0)
880 ? tree_low_cst (length, 0) : -1);
881
882 /* If compiling to native, pass a reference to the primitive type class
883 and save the runtime some work. However, the bytecode generator
884 expects to find the type_code int here. */
885 if (flag_emit_class_files)
886 type_arg = build_int_2 (atype_value, 0);
887 else
888 type_arg = build_class_ref (prim_type);
889
890 return build (CALL_EXPR, promote_type (type),
891 build_address_of (soft_newarray_node),
892 tree_cons (NULL_TREE,
893 type_arg,
894 build_tree_list (NULL_TREE, length)),
895 NULL_TREE);
896 }
897
898 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
899 of the dimension. */
900
901 tree
902 build_anewarray (tree class_type, tree length)
903 {
904 tree type
905 = build_java_array_type (class_type,
906 host_integerp (length, 0)
907 ? tree_low_cst (length, 0) : -1);
908
909 return build (CALL_EXPR, promote_type (type),
910 build_address_of (soft_anewarray_node),
911 tree_cons (NULL_TREE, length,
912 tree_cons (NULL_TREE, build_class_ref (class_type),
913 build_tree_list (NULL_TREE,
914 null_pointer_node))),
915 NULL_TREE);
916 }
917
918 /* Return a node that evaluates 'new TYPE[LENGTH]'. */
919
920 tree
921 build_new_array (tree type, tree length)
922 {
923 if (JPRIMITIVE_TYPE_P (type))
924 return build_newarray (encode_newarray_type (type), length);
925 else
926 return build_anewarray (TREE_TYPE (type), length);
927 }
928
929 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
930 class pointer, the number of dimensions and the matching number of
931 dimension sizes. The argument list is NULL terminated. */
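/* Illustrative example (hypothetical source, not from this file): for
   'new String[2][3]' the two dimension sizes are popped off the stack and
   the call built below is roughly

     _Jv_NewMultiArray (<class ref for String[][]>, 2, 2, 3, NULL)

   i.e. class pointer, number of dimensions, the dimension sizes in order,
   and the terminating NULL.  */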
932
933 static void
934 expand_java_multianewarray (tree class_type, int ndim)
935 {
936 int i;
937 tree args = build_tree_list( NULL_TREE, null_pointer_node );
938
939 for( i = 0; i < ndim; i++ )
940 args = tree_cons (NULL_TREE, pop_value (int_type_node), args);
941
942 push_value (build (CALL_EXPR,
943 promote_type (class_type),
944 build_address_of (soft_multianewarray_node),
945 tree_cons (NULL_TREE, build_class_ref (class_type),
946 tree_cons (NULL_TREE,
947 build_int_2 (ndim, 0), args )),
948 NULL_TREE));
949 }
950
951 /* ARRAY[INDEX] <- RHS. build_java_check_indexed_type makes sure that
952 ARRAY is an array type. May expand some bounds checking and NULL
953 pointer checking. RHS_TYPE_NODE is the type of the value we are going
954 to store. In the case of CHAR/BYTE/BOOLEAN/SHORT, the type popped off
955 the stack is an INT. In those cases, we make the conversion.
956
957 If ARRAY is a reference type, the assignment is checked at run-time
958 to make sure that the RHS can be assigned to the array element
959 type. It is not necessary to generate this code if ARRAY is final. */
960
961 static void
962 expand_java_arraystore (tree rhs_type_node)
963 {
964 tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
965 && TYPE_PRECISION (rhs_type_node) <= 32) ?
966 int_type_node : rhs_type_node);
967 tree index = pop_value (int_type_node);
968 tree array = pop_value (ptr_type_node);
969
970 rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);
971
972 flush_quick_stack ();
973
974 index = save_expr (index);
975 array = save_expr (array);
976
977 if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
978 {
979 tree check = build_java_arraystore_check (array, rhs_node);
980 expand_expr_stmt (check);
981 }
982
983 expand_assignment (build_java_arrayaccess (array,
984 rhs_type_node,
985 index),
986 rhs_node, 0);
987 }
988
989 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
990 sure that LHS is an array type. May expand some bounds checking and NULL
991 pointer checking.
992 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
993 BOOLEAN/SHORT, we push a promoted type back to the stack.
994 */
995
996 static void
997 expand_java_arrayload (tree lhs_type_node )
998 {
999 tree load_node;
1000 tree index_node = pop_value (int_type_node);
1001 tree array_node = pop_value (ptr_type_node);
1002
1003 index_node = save_expr (index_node);
1004 array_node = save_expr (array_node);
1005
1006 if (TREE_TYPE (array_node) == ptr_type_node)
1007 /* The only way we could get a node of type ptr_type_node at this
1008 point is `aconst_null; arraylength' or something equivalent, so
1009 unconditionally throw NullPointerException. */
1010 load_node = build (CALL_EXPR, lhs_type_node,
1011 build_address_of (soft_nullpointer_node),
1012 NULL_TREE, NULL_TREE);
1013 else
1014 {
1015 lhs_type_node = build_java_check_indexed_type (array_node, lhs_type_node);
1016 load_node = build_java_arrayaccess (array_node,
1017 lhs_type_node,
1018 index_node);
1019 }
1020 if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
1021 load_node = fold (build1 (NOP_EXPR, int_type_node, load_node));
1022 push_value (load_node);
1023 }
1024
1025 /* Expands .length. Makes sure that we deal with an array and may expand
1026 a NULL check on the array object. */
1027
1028 static void
1029 expand_java_array_length (void)
1030 {
1031 tree array = pop_value (ptr_type_node);
1032 tree length = build_java_array_length_access (array);
1033
1034 push_value (length);
1035 }
1036
1037 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1038 either soft_monitorenter_node or soft_monitorexit_node. */
1039
1040 static tree
1041 build_java_monitor (tree call, tree object)
1042 {
1043 return (build (CALL_EXPR,
1044 void_type_node,
1045 build_address_of (call),
1046 build_tree_list (NULL_TREE, object),
1047 NULL_TREE));
1048 }
1049
1050 /* Emit code for one of the PUSHC instructions. */
1051
1052 static void
1053 expand_java_pushc (int ival, tree type)
1054 {
1055 tree value;
1056 if (type == ptr_type_node && ival == 0)
1057 value = null_pointer_node;
1058 else if (type == int_type_node || type == long_type_node)
1059 {
1060 value = build_int_2 (ival, ival < 0 ? -1 : 0);
1061 TREE_TYPE (value) = type;
1062 }
1063 else if (type == float_type_node || type == double_type_node)
1064 {
1065 REAL_VALUE_TYPE x;
1066 REAL_VALUE_FROM_INT (x, ival, 0, TYPE_MODE (type));
1067 value = build_real (type, x);
1068 }
1069 else
1070 abort ();
1071
1072 push_value (value);
1073 }
1074
1075 static void
1076 expand_java_return (tree type)
1077 {
1078 if (type == void_type_node)
1079 expand_null_return ();
1080 else
1081 {
1082 tree retval = pop_value (type);
1083 tree res = DECL_RESULT (current_function_decl);
1084 retval = build (MODIFY_EXPR, TREE_TYPE (res), res, retval);
1085
1086 /* Handle the situation where the native integer type is smaller
1087 than the JVM integer. It can happen for many cross compilers.
1088 The whole if expression just goes away if INT_TYPE_SIZE < 32
1089 is false. */
1090 if (INT_TYPE_SIZE < 32
1091 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
1092 < GET_MODE_SIZE (TYPE_MODE (type))))
1093 retval = build1(NOP_EXPR, TREE_TYPE(res), retval);
1094
1095 TREE_SIDE_EFFECTS (retval) = 1;
1096 expand_return (retval);
1097 }
1098 }
1099
1100 static void
1101 expand_load_internal (int index, tree type, int pc)
1102 {
1103 tree copy;
1104 tree var = find_local_variable (index, type, pc);
1105
1106 /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
1107 on the stack. If there is an assignment to this VAR_DECL between
1108 the stack push and the use, then the wrong code could be
1109 generated. To avoid this we create a new local and copy our
1110 value into it. Then we push this new local on the stack.
1111 Hopefully this all gets optimized out. */
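  /* For instance (illustrative bytecode sequence, not from the original
     comment):

       iload_0      ; pushes local 0 onto the quick_stack
       iconst_1
       istore_0     ; local 0 is overwritten before the pushed value is used
       iadd

     without the copy, the value pushed by iload_0 would observe the new
     contents of local 0 when the quick_stack entry is finally consumed.  */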
1112 copy = build_decl (VAR_DECL, NULL_TREE, type);
1113 DECL_CONTEXT (copy) = current_function_decl;
1114 layout_decl (copy, 0);
1115 DECL_REGISTER (copy) = 1;
1116 expand_decl (copy);
1117 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (copy);
1118 DECL_INITIAL (copy) = var;
1119 expand_decl_init (copy);
1120 push_value (copy);
1121 }
1122
1123 tree
1124 build_address_of (tree value)
1125 {
1126 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1127 }
1128
1129 bool class_has_finalize_method (tree type)
1130 {
1131 tree super = CLASSTYPE_SUPER (type);
1132
1133 if (super == NULL_TREE)
1134 return false; /* Every class with a real finalizer inherits */
1135 /* from java.lang.Object. */
1136 else
1137 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1138 }
1139
1140 static void
1141 expand_java_NEW (tree type)
1142 {
1143 tree alloc_node;
1144
1145 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1146 : alloc_no_finalizer_node);
1147 if (! CLASS_LOADED_P (type))
1148 load_class (type, 1);
1149 safe_layout_class (type);
1150 push_value (build (CALL_EXPR, promote_type (type),
1151 build_address_of (alloc_node),
1152 tree_cons (NULL_TREE, build_class_ref (type),
1153 build_tree_list (NULL_TREE,
1154 size_in_bytes (type))),
1155 NULL_TREE));
1156 }
1157
1158 /* This returns an expression which will extract the class of an
1159 object. */
1160
1161 tree
1162 build_get_class (tree value)
1163 {
1164 tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
1165 tree vtable_field = lookup_field (&object_type_node,
1166 get_identifier ("vtable"));
1167 return build (COMPONENT_REF, class_ptr_type,
1168 build1 (INDIRECT_REF, dtable_type,
1169 build (COMPONENT_REF, dtable_ptr_type,
1170 build_java_indirect_ref (object_type_node, value,
1171 flag_check_references),
1172 vtable_field)),
1173 class_field);
1174 }
1175
1176 /* This builds the tree representation of the `instanceof' operator.
1177 It tries various tricks to optimize this in cases where types are
1178 known. */
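/* Illustrative summary of the cases handled below (hypothetical Java
   expressions, added for clarity):

     x instanceof Object, or the static type of x already inherits from T
        =>  x != null
     T and the static type of x are unrelated non-interface classes
        =>  false
     T is a final class
        =>  x ? x->vtable->class == <class ref for T> : false
     otherwise
        =>  runtime call through soft_instanceof_node.  */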
1179
1180 tree
1181 build_instanceof (tree value, tree type)
1182 {
1183 tree expr;
1184 tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
1185 tree valtype = TREE_TYPE (TREE_TYPE (value));
1186 tree valclass = TYPE_NAME (valtype);
1187 tree klass;
1188
1189 /* When compiling from bytecode, we need to ensure that TYPE has
1190 been loaded. */
1191 if (CLASS_P (type) && ! CLASS_LOADED_P (type))
1192 {
1193 load_class (type, 1);
1194 safe_layout_class (type);
1195 if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
1196 return error_mark_node;
1197 }
1198 klass = TYPE_NAME (type);
1199
1200 if (type == object_type_node || inherits_from_p (valtype, type))
1201 {
1202 /* Anything except `null' is an instance of Object. Likewise,
1203 if the object is known to be an instance of the class, then
1204 we only need to check for `null'. */
1205 expr = build (NE_EXPR, itype, value, null_pointer_node);
1206 }
1207 else if (! TYPE_ARRAY_P (type)
1208 && ! TYPE_ARRAY_P (valtype)
1209 && DECL_P (klass) && DECL_P (valclass)
1210 && ! CLASS_INTERFACE (valclass)
1211 && ! CLASS_INTERFACE (klass)
1212 && ! inherits_from_p (type, valtype)
1213 && (CLASS_FINAL (klass)
1214 || ! inherits_from_p (valtype, type)))
1215 {
1216 /* The classes are from different branches of the derivation
1217 tree, so we immediately know the answer. */
1218 expr = boolean_false_node;
1219 }
1220 else if (DECL_P (klass) && CLASS_FINAL (klass))
1221 {
1222 tree save = save_expr (value);
1223 expr = build (COND_EXPR, itype,
1224 save,
1225 build (EQ_EXPR, itype,
1226 build_get_class (save),
1227 build_class_ref (type)),
1228 boolean_false_node);
1229 }
1230 else
1231 {
1232 expr = build (CALL_EXPR, itype,
1233 build_address_of (soft_instanceof_node),
1234 tree_cons (NULL_TREE, value,
1235 build_tree_list (NULL_TREE,
1236 build_class_ref (type))),
1237 NULL_TREE);
1238 }
1239 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
1240 return expr;
1241 }
1242
1243 static void
1244 expand_java_INSTANCEOF (tree type)
1245 {
1246 tree value = pop_value (object_ptr_type_node);
1247 value = build_instanceof (value, type);
1248 push_value (value);
1249 }
1250
1251 static void
1252 expand_java_CHECKCAST (tree type)
1253 {
1254 tree value = pop_value (ptr_type_node);
1255 value = build (CALL_EXPR, promote_type (type),
1256 build_address_of (soft_checkcast_node),
1257 tree_cons (NULL_TREE, build_class_ref (type),
1258 build_tree_list (NULL_TREE, value)),
1259 NULL_TREE);
1260 push_value (value);
1261 }
1262
1263 static void
1264 expand_iinc (unsigned int local_var_index, int ival, int pc)
1265 {
1266 tree local_var, res;
1267 tree constant_value;
1268
1269 flush_quick_stack ();
1270 local_var = find_local_variable (local_var_index, int_type_node, pc);
1271 constant_value = build_int_2 (ival, ival < 0 ? -1 : 0);
1272 res = fold (build (PLUS_EXPR, int_type_node, local_var, constant_value));
1273 expand_assignment (local_var, res, 0);
1274 }
1275
1276
1277 tree
1278 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1279 {
1280 tree call = NULL;
1281 tree arg1 = convert (type, op1);
1282 tree arg2 = convert (type, op2);
1283
1284 if (type == int_type_node)
1285 {
1286 switch (op)
1287 {
1288 case TRUNC_DIV_EXPR:
1289 call = soft_idiv_node;
1290 break;
1291 case TRUNC_MOD_EXPR:
1292 call = soft_irem_node;
1293 break;
1294 default:
1295 break;
1296 }
1297 }
1298 else if (type == long_type_node)
1299 {
1300 switch (op)
1301 {
1302 case TRUNC_DIV_EXPR:
1303 call = soft_ldiv_node;
1304 break;
1305 case TRUNC_MOD_EXPR:
1306 call = soft_lrem_node;
1307 break;
1308 default:
1309 break;
1310 }
1311 }
1312
1313 if (! call)
1314 abort ();
1315
1316 call = build (CALL_EXPR, type,
1317 build_address_of (call),
1318 tree_cons (NULL_TREE, arg1,
1319 build_tree_list (NULL_TREE, arg2)),
1320 NULL_TREE);
1321
1322 return call;
1323 }
1324
1325 tree
1326 build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
1327 {
1328 tree mask;
1329 switch (op)
1330 {
1331 case URSHIFT_EXPR:
1332 {
1333 tree u_type = java_unsigned_type (type);
1334 arg1 = convert (u_type, arg1);
1335 arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
1336 return convert (type, arg1);
1337 }
1338 case LSHIFT_EXPR:
1339 case RSHIFT_EXPR:
1340 mask = build_int_2 (TYPE_PRECISION (TREE_TYPE (arg1)) - 1, 0);
1341 arg2 = fold (build (BIT_AND_EXPR, int_type_node, arg2, mask));
1342 break;
1343
1344 case COMPARE_L_EXPR: /* arg1 > arg2 ? 1 : arg1 == arg2 ? 0 : -1 */
1345 case COMPARE_G_EXPR: /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 : 1 */
1346 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1347 {
1348 tree ifexp1 = fold ( build (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
1349 boolean_type_node, arg1, arg2));
1350 tree ifexp2 = fold ( build (EQ_EXPR, boolean_type_node, arg1, arg2));
1351 tree second_compare = fold (build (COND_EXPR, int_type_node,
1352 ifexp2, integer_zero_node,
1353 op == COMPARE_L_EXPR
1354 ? integer_minus_one_node
1355 : integer_one_node));
1356 return fold (build (COND_EXPR, int_type_node, ifexp1,
1357 op == COMPARE_L_EXPR ? integer_one_node
1358 : integer_minus_one_node,
1359 second_compare));
1360 }
1361 case COMPARE_EXPR:
1362 arg1 = save_expr (arg1); arg2 = save_expr (arg2);
1363 {
1364 tree ifexp1 = fold ( build (LT_EXPR, boolean_type_node, arg1, arg2));
1365 tree ifexp2 = fold ( build (GT_EXPR, boolean_type_node, arg1, arg2));
1366 tree second_compare = fold ( build (COND_EXPR, int_type_node,
1367 ifexp2, integer_one_node,
1368 integer_zero_node));
1369 return fold (build (COND_EXPR, int_type_node,
1370 ifexp1, integer_minus_one_node, second_compare));
1371 }
1372 case TRUNC_DIV_EXPR:
1373 case TRUNC_MOD_EXPR:
1374 if (TREE_CODE (type) == REAL_TYPE
1375 && op == TRUNC_MOD_EXPR)
1376 {
1377 tree call;
1378 if (type != double_type_node)
1379 {
1380 arg1 = convert (double_type_node, arg1);
1381 arg2 = convert (double_type_node, arg2);
1382 }
1383 call = build (CALL_EXPR, double_type_node,
1384 build_address_of (soft_fmod_node),
1385 tree_cons (NULL_TREE, arg1,
1386 build_tree_list (NULL_TREE, arg2)),
1387 NULL_TREE);
1388 if (type != double_type_node)
1389 call = convert (type, call);
1390 return call;
1391 }
1392
1393 if (TREE_CODE (type) == INTEGER_TYPE
1394 && flag_use_divide_subroutine
1395 && ! flag_syntax_only)
1396 return build_java_soft_divmod (op, type, arg1, arg2);
1397
1398 break;
1399 default: ;
1400 }
1401 return fold (build (op, type, arg1, arg2));
1402 }
1403
1404 static void
1405 expand_java_binop (tree type, enum tree_code op)
1406 {
1407 tree larg, rarg;
1408 tree ltype = type;
1409 tree rtype = type;
1410 switch (op)
1411 {
1412 case LSHIFT_EXPR:
1413 case RSHIFT_EXPR:
1414 case URSHIFT_EXPR:
1415 rtype = int_type_node;
1416 rarg = pop_value (rtype);
1417 break;
1418 default:
1419 rarg = pop_value (rtype);
1420 }
1421 larg = pop_value (ltype);
1422 push_value (build_java_binop (op, type, larg, rarg));
1423 }
1424
1425 /* Lookup the field named NAME in *TYPEP or its super classes.
1426 If not found, return NULL_TREE.
1427 (If the *TYPEP is not found, or if the field reference is
1428 ambiguous, return error_mark_node.)
1429 If found, return the FIELD_DECL, and set *TYPEP to the
1430 class containing the field. */
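/* For example, elsewhere in this file the "vtable" field of Object is found
   with

     lookup_field (&object_type_node, get_identifier ("vtable"));

   leaving *TYPEP pointing at the class that actually declares the field.  */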
1431
1432 tree
1433 lookup_field (tree *typep, tree name)
1434 {
1435 if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
1436 {
1437 load_class (*typep, 1);
1438 safe_layout_class (*typep);
1439 if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
1440 return error_mark_node;
1441 }
1442 do
1443 {
1444 tree field, basetype_vec;
1445 tree save_field;
1446 int n, i;
1447
1448 for (field = TYPE_FIELDS (*typep); field; field = TREE_CHAIN (field))
1449 if (DECL_NAME (field) == name)
1450 return field;
1451
1452 /* Process implemented interfaces. */
1453 basetype_vec = TYPE_BINFO_BASETYPES (*typep);
1454 n = TREE_VEC_LENGTH (basetype_vec);
1455 save_field = NULL_TREE;
1456 for (i = 0; i < n; i++)
1457 {
1458 tree t = BINFO_TYPE (TREE_VEC_ELT (basetype_vec, i));
1459 if ((field = lookup_field (&t, name)))
1460 {
1461 if (save_field == field)
1462 continue;
1463 if (save_field == NULL_TREE)
1464 save_field = field;
1465 else
1466 {
1467 tree i1 = DECL_CONTEXT (save_field);
1468 tree i2 = DECL_CONTEXT (field);
1469 error ("reference `%s' is ambiguous: appears in interface `%s' and interface `%s'",
1470 IDENTIFIER_POINTER (name),
1471 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
1472 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
1473 return error_mark_node;
1474 }
1475 }
1476 }
1477
1478 if (save_field != NULL_TREE)
1479 return save_field;
1480
1481 *typep = CLASSTYPE_SUPER (*typep);
1482 } while (*typep);
1483 return NULL_TREE;
1484 }
1485
1486 /* Look up the field named NAME in object SELF_VALUE,
1487 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1488 SELF_VALUE is NULL_TREE if looking for a static field. */
1489
1490 tree
1491 build_field_ref (tree self_value, tree self_class, tree name)
1492 {
1493 tree base_class = self_class;
1494 tree field_decl = lookup_field (&base_class, name);
1495 if (field_decl == NULL_TREE)
1496 {
1497 error ("field `%s' not found", IDENTIFIER_POINTER (name));
1498 return error_mark_node;
1499 }
1500 if (self_value == NULL_TREE)
1501 {
1502 return build_static_field_ref (field_decl);
1503 }
1504 else
1505 {
1506 int check = (flag_check_references
1507 && ! (DECL_P (self_value)
1508 && DECL_NAME (self_value) == this_identifier_node));
1509
1510 tree base_type = promote_type (base_class);
1511 if (base_type != TREE_TYPE (self_value))
1512 self_value = fold (build1 (NOP_EXPR, base_type, self_value));
1513 if (flag_indirect_dispatch
1514 && output_class != self_class)
1515 /* FIXME: output_class != self_class is not exactly the right
1516 test. What we really want to know is whether self_class is
1517 in the same translation unit as output_class. If it is,
1518 we can make a direct reference. */
1519 {
1520 tree otable_index =
1521 build_int_2 (get_symbol_table_index
1522 (field_decl, &TYPE_OTABLE_METHODS (output_class)), 0);
1523 tree field_offset =
1524 build (ARRAY_REF, integer_type_node, TYPE_OTABLE_DECL (output_class),
1525 otable_index);
1526 tree address
1527 = fold (build (PLUS_EXPR,
1528 build_pointer_type (TREE_TYPE (field_decl)),
1529 self_value, field_offset));
1530 return fold (build1 (INDIRECT_REF, TREE_TYPE (field_decl), address));
1531 }
1532
1533 self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
1534 self_value, check);
1535 return fold (build (COMPONENT_REF, TREE_TYPE (field_decl),
1536 self_value, field_decl));
1537 }
1538 }
1539
1540 tree
1541 lookup_label (int pc)
1542 {
1543 tree name;
1544 char buf[32];
1545 ASM_GENERATE_INTERNAL_LABEL(buf, "LJpc=", pc);
1546 name = get_identifier (buf);
1547 if (IDENTIFIER_LOCAL_VALUE (name))
1548 return IDENTIFIER_LOCAL_VALUE (name);
1549 else
1550 {
1551 /* The type of the address of a label is return_address_type_node. */
1552 tree decl = create_label_decl (name);
1553 LABEL_PC (decl) = pc;
1554 label_rtx (decl);
1555 return pushdecl (decl);
1556 }
1557 }
1558
1559 /* Generate a unique name for labels used by loops, switches,
1560 and try-catch-finally blocks, or for temporary variables. */
1561
1562 tree
1563 generate_name (void)
1564 {
1565 static int l_number = 0;
1566 char buff [32];
1567 ASM_GENERATE_INTERNAL_LABEL(buff, "LJv", l_number);
1568 l_number++;
1569 return get_identifier (buff);
1570 }
1571
1572 tree
1573 create_label_decl (tree name)
1574 {
1575 tree decl;
1576 decl = build_decl (LABEL_DECL, name,
1577 TREE_TYPE (return_address_type_node));
1578 DECL_CONTEXT (decl) = current_function_decl;
1579 DECL_IGNORED_P (decl) = 1;
1580 return decl;
1581 }
1582
1583 /* This maps a bytecode offset (PC) to various flags. */
1584 char *instruction_bits;
1585
1586 static void
1587 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1588 {
1589 lookup_label (target_pc);
1590 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1591 }
1592
1593 /* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
1594 where CONDITION is one of the compare operators. */
1595
1596 static void
1597 expand_compare (enum tree_code condition, tree value1, tree value2,
1598 int target_pc)
1599 {
1600 tree target = lookup_label (target_pc);
1601 tree cond = fold (build (condition, boolean_type_node, value1, value2));
1602 expand_start_cond (java_truthvalue_conversion (cond), 0);
1603 expand_goto (target);
1604 expand_end_cond ();
1605 }
1606
1607 /* Emit code for a TEST-type opcode. */
1608
1609 static void
1610 expand_test (enum tree_code condition, tree type, int target_pc)
1611 {
1612 tree value1, value2;
1613 flush_quick_stack ();
1614 value1 = pop_value (type);
1615 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1616 expand_compare (condition, value1, value2, target_pc);
1617 }
1618
1619 /* Emit code for a COND-type opcode. */
1620
1621 static void
1622 expand_cond (enum tree_code condition, tree type, int target_pc)
1623 {
1624 tree value1, value2;
1625 flush_quick_stack ();
1626 /* note: pop values in opposite order */
1627 value2 = pop_value (type);
1628 value1 = pop_value (type);
1629 /* Maybe should check value1 and value2 for type compatibility ??? */
1630 expand_compare (condition, value1, value2, target_pc);
1631 }
1632
1633 static void
1634 expand_java_goto (int target_pc)
1635 {
1636 tree target_label = lookup_label (target_pc);
1637 flush_quick_stack ();
1638 expand_goto (target_label);
1639 }
1640
1641 #if 0
1642 static void
1643 expand_java_call (int target_pc, int return_address)
1644 int target_pc, return_address;
1645 {
1646 tree target_label = lookup_label (target_pc);
1647 tree value = build_int_2 (return_address, return_address < 0 ? -1 : 0);
1648 push_value (value);
1649 flush_quick_stack ();
1650 expand_goto (target_label);
1651 }
1652
1653 static void
1654 expand_java_ret (tree return_address ATTRIBUTE_UNUSED)
1655 {
1656 warning ("ret instruction not implemented");
1657 #if 0
1658 tree target_label = lookup_label (target_pc);
1659 flush_quick_stack ();
1660 expand_goto (target_label);
1661 #endif
1662 }
1663 #endif
1664
1665 static tree
1666 pop_arguments (tree arg_types)
1667 {
1668 if (arg_types == end_params_node)
1669 return NULL_TREE;
1670 if (TREE_CODE (arg_types) == TREE_LIST)
1671 {
1672 tree tail = pop_arguments (TREE_CHAIN (arg_types));
1673 tree type = TREE_VALUE (arg_types);
1674 tree arg = pop_value (type);
1675 if (PROMOTE_PROTOTYPES
1676 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1677 && INTEGRAL_TYPE_P (type))
1678 arg = convert (integer_type_node, arg);
1679 return tree_cons (NULL_TREE, arg, tail);
1680 }
1681 abort ();
1682 }
1683
1684 /* Build an expression to initialize the class CLAS.
1685 if EXPR is non-NULL, returns an expression to first call the initializer
1686 (if it is needed) and then calls EXPR. */
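/* Rough sketch of the code built below when the class-initialization
   optimization is enabled (names are illustrative; soft_initclass_node is
   the initialization entry point, _Jv_InitClass in libgcj):

     ((init$flag == false ? _Jv_InitClass (<class ref for CLAS>) : 0),
      init$flag = true),
     EXPR

   With always_initialize_class_p set, the unconditional _Jv_InitClass call
   is emitted instead.  */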
1687
1688 tree
1689 build_class_init (tree clas, tree expr)
1690 {
1691 tree init;
1692
1693 /* An optimization: if CLAS is a superclass of the class we're
1694 compiling, we don't need to initialize it. However, if CLAS is
1695 an interface, it won't necessarily be initialized, even if we
1696 implement it. */
1697 if ((! CLASS_INTERFACE (TYPE_NAME (clas))
1698 && inherits_from_p (current_class, clas))
1699 || current_class == clas)
1700 return expr;
1701
1702 if (always_initialize_class_p)
1703 {
1704 init = build (CALL_EXPR, void_type_node,
1705 build_address_of (soft_initclass_node),
1706 build_tree_list (NULL_TREE, build_class_ref (clas)),
1707 NULL_TREE);
1708 TREE_SIDE_EFFECTS (init) = 1;
1709 }
1710 else
1711 {
1712 tree *init_test_decl;
1713 init_test_decl = java_treetreehash_new
1714 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);
1715
1716 if (*init_test_decl == NULL)
1717 {
1718 /* Build a declaration and mark it as a flag used to track
1719 static class initializations. */
1720 *init_test_decl = build_decl (VAR_DECL, NULL_TREE,
1721 boolean_type_node);
1722 MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (*init_test_decl);
1723 LOCAL_CLASS_INITIALIZATION_FLAG (*init_test_decl) = 1;
1724 DECL_CONTEXT (*init_test_decl) = current_function_decl;
1725 DECL_FUNCTION_INIT_TEST_CLASS (*init_test_decl) = clas;
1726 /* Tell the check-init code to ignore this decl when not
1727 optimizing class initialization. */
1728 if (!STATIC_CLASS_INIT_OPT_P ())
1729 DECL_BIT_INDEX(*init_test_decl) = -1;
1730 /* Don't emit any symbolic debugging info for this decl. */
1731 DECL_IGNORED_P (*init_test_decl) = 1;
1732 }
1733
1734 init = build (CALL_EXPR, void_type_node,
1735 build_address_of (soft_initclass_node),
1736 build_tree_list (NULL_TREE, build_class_ref (clas)),
1737 NULL_TREE);
1738 TREE_SIDE_EFFECTS (init) = 1;
1739 init = build (COND_EXPR, void_type_node,
1740 build (EQ_EXPR, boolean_type_node,
1741 *init_test_decl, boolean_false_node),
1742 init, integer_zero_node);
1743 TREE_SIDE_EFFECTS (init) = 1;
1744 init = build (COMPOUND_EXPR, TREE_TYPE (expr), init,
1745 build (MODIFY_EXPR, boolean_type_node,
1746 *init_test_decl, boolean_true_node));
1747 TREE_SIDE_EFFECTS (init) = 1;
1748 }
1749
1750 if (expr != NULL_TREE)
1751 {
1752 expr = build (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
1753 TREE_SIDE_EFFECTS (expr) = 1;
1754 return expr;
1755 }
1756 return init;
1757 }
1758
1759 tree
1760 build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
1761 tree self_type, tree method_signature ATTRIBUTE_UNUSED,
1762 tree arg_list ATTRIBUTE_UNUSED)
1763 {
1764 tree func;
1765 if (is_compiled_class (self_type))
1766 {
1767 if (!flag_indirect_dispatch
1768 || (!TREE_PUBLIC (method) && DECL_CONTEXT (method)))
1769 {
1770 make_decl_rtl (method, NULL);
1771 func = build1 (ADDR_EXPR, method_ptr_type_node, method);
1772 }
1773 else
1774 {
1775 tree table_index =
1776 build_int_2 (get_symbol_table_index
1777 (method, &TYPE_ATABLE_METHODS (output_class)), 0);
1778 func =
1779 build (ARRAY_REF, method_ptr_type_node,
1780 TYPE_ATABLE_DECL (output_class), table_index);
1781 }
1782 }
1783 else
1784 {
1785 /* We don't know whether the method has been (statically) compiled.
1786 Compile this code to get a reference to the method's code:
1787
1788 SELF_TYPE->methods[METHOD_INDEX].ncode
1789
1790 */
1791
1792 int method_index = 0;
1793 tree meth, ref;
1794
1795 /* The method might actually be declared in some superclass, so
1796 we have to use its class context, not the caller's notion of
1797 where the method is. */
1798 self_type = DECL_CONTEXT (method);
1799 ref = build_class_ref (self_type);
1800 ref = build1 (INDIRECT_REF, class_type_node, ref);
1801 if (ncode_ident == NULL_TREE)
1802 ncode_ident = get_identifier ("ncode");
1803 if (methods_ident == NULL_TREE)
1804 methods_ident = get_identifier ("methods");
1805 ref = build (COMPONENT_REF, method_ptr_type_node, ref,
1806 lookup_field (&class_type_node, methods_ident));
1807 for (meth = TYPE_METHODS (self_type);
1808 ; meth = TREE_CHAIN (meth))
1809 {
1810 if (method == meth)
1811 break;
1812 if (meth == NULL_TREE)
1813 fatal_error ("method '%s' not found in class",
1814 IDENTIFIER_POINTER (DECL_NAME (method)));
1815 method_index++;
1816 }
1817 method_index *= int_size_in_bytes (method_type_node);
1818 ref = fold (build (PLUS_EXPR, method_ptr_type_node,
1819 ref, build_int_2 (method_index, 0)));
1820 ref = build1 (INDIRECT_REF, method_type_node, ref);
1821 func = build (COMPONENT_REF, nativecode_ptr_type_node,
1822 ref,
1823 lookup_field (&method_type_node, ncode_ident));
1824 }
1825 return func;
1826 }
1827
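/* Build an expression for the vtable of the receiver of a virtual or
   interface call.  ARG_LIST is the argument list; its first element
   is the receiver, which we wrap in a SAVE_EXPR so it is evaluated
   only once.  */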
1828 tree
1829 invoke_build_dtable (int is_invoke_interface, tree arg_list)
1830 {
1831 tree dtable, objectref;
1832
1833 TREE_VALUE (arg_list) = save_expr (TREE_VALUE (arg_list));
1834
1835 /* If we're dealing with interfaces and if the objectref
1836 argument is an array then get the dispatch table of the class
1837 Object rather than the one from the objectref. */
1838 objectref = (is_invoke_interface
1839 && is_array_type_p (TREE_TYPE (TREE_VALUE (arg_list))) ?
1840 object_type_node : TREE_VALUE (arg_list));
1841
1842 if (dtable_ident == NULL_TREE)
1843 dtable_ident = get_identifier ("vtable");
1844 dtable = build_java_indirect_ref (object_type_node, objectref,
1845 flag_check_references);
1846 dtable = build (COMPONENT_REF, dtable_ptr_type, dtable,
1847 lookup_field (&object_type_node, dtable_ident));
1848
1849 return dtable;
1850 }
1851
1852 /* Determine the index in SYMBOL_TABLE for a reference to the decl
1853 T.  Indices start at 1.  If this decl has not been seen before, it
1854 will be appended to the table; if it has, the existing table slot
1855 will be reused. */

1856
1857 int
1858 get_symbol_table_index (tree t, tree *symbol_table)
1859 {
1860 int i = 1;
1861 tree method_list;
1862
1863 if (*symbol_table == NULL_TREE)
1864 {
1865 *symbol_table = build_tree_list (t, t);
1866 return 1;
1867 }
1868
1869 method_list = *symbol_table;
1870
1871 while (1)
1872 {
1873 tree value = TREE_VALUE (method_list);
1874 if (value == t)
1875 return i;
1876 i++;
1877 if (TREE_CHAIN (method_list) == NULL_TREE)
1878 break;
1879 else
1880 method_list = TREE_CHAIN (method_list);
1881 }
1882
1883 TREE_CHAIN (method_list) = build_tree_list (t, t);
1884 return i;
1885 }
1886
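/* Build an expression that evaluates to the code pointer of METHOD,
   given an expression DTABLE for the receiver's vtable.  With
   -findirect-dispatch the slot number is read from the otable;
   otherwise it is computed from DECL_VINDEX, skipping the `class' and
   GC descriptor slots at the start of the vtable.  */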
1887 tree
1888 build_invokevirtual (tree dtable, tree method)
1889 {
1890 tree func;
1891 tree nativecode_ptr_ptr_type_node
1892 = build_pointer_type (nativecode_ptr_type_node);
1893 tree method_index;
1894 tree otable_index;
1895
1896 if (flag_indirect_dispatch)
1897 {
1898 otable_index
1899 = build_int_2 (get_symbol_table_index
1900 (method, &TYPE_OTABLE_METHODS (output_class)), 0);
1901 method_index = build (ARRAY_REF, integer_type_node,
1902 TYPE_OTABLE_DECL (output_class),
1903 otable_index);
1904 }
1905 else
1906 {
1907 method_index = convert (sizetype, DECL_VINDEX (method));
1908
1909 if (TARGET_VTABLE_USES_DESCRIPTORS)
1910 /* Add one to skip bogus descriptor for class and GC descriptor. */
1911 method_index = size_binop (PLUS_EXPR, method_index, size_int (1));
1912 else
1913 /* Add 1 to skip "class" field of dtable, and 1 to skip GC descriptor. */
1914 method_index = size_binop (PLUS_EXPR, method_index, size_int (2));
1915
1916 method_index = size_binop (MULT_EXPR, method_index,
1917 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
1918
1919 if (TARGET_VTABLE_USES_DESCRIPTORS)
1920 method_index = size_binop (MULT_EXPR, method_index,
1921 size_int (TARGET_VTABLE_USES_DESCRIPTORS));
1922 }
1923
1924 func = fold (build (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
1925 convert (nativecode_ptr_ptr_type_node, method_index)));
1926
1927 if (TARGET_VTABLE_USES_DESCRIPTORS)
1928 func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
1929 else
1930 func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
1931
1932 return func;
1933 }
1934
1935 static GTY(()) tree class_ident;
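/* Build a call to the runtime interface-method lookup routine
   (soft_lookupinterfacemethod) that, given the receiver's DTABLE,
   yields the code pointer implementing METHOD in the receiver's
   class.  */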
1936 tree
1937 build_invokeinterface (tree dtable, tree method)
1938 {
1939 tree lookup_arg;
1940 tree interface;
1941 tree idx;
1942 tree meth;
1943 tree otable_index;
1944 int i;
1945
1946 /* We expand invokeinterface here. _Jv_LookupInterfaceMethod() will
1947 ensure that the selected method exists, is public, and is neither
1948 abstract nor static. */
1949
1950 if (class_ident == NULL_TREE)
1951 class_ident = get_identifier ("class");
1952
1953 dtable = build_java_indirect_ref (dtable_type, dtable,
1954 flag_check_references);
1955 dtable = build (COMPONENT_REF, class_ptr_type, dtable,
1956 lookup_field (&dtable_type, class_ident));
1957
1958 interface = DECL_CONTEXT (method);
1959 if (! CLASS_INTERFACE (TYPE_NAME (interface)))
1960 abort ();
1961 layout_class_methods (interface);
1962
1963 if (flag_indirect_dispatch)
1964 {
1965 otable_index =
1966 build_int_2 (get_symbol_table_index
1967 (method, &TYPE_OTABLE_METHODS (output_class)), 0);
1968 idx =
1969 build (ARRAY_REF, integer_type_node, TYPE_OTABLE_DECL (output_class),
1970 otable_index);
1971 }
1972 else
1973 {
1974 i = 1;
1975 for (meth = TYPE_METHODS (interface); ; meth = TREE_CHAIN (meth), i++)
1976 {
1977 if (meth == method)
1978 {
1979 idx = build_int_2 (i, 0);
1980 break;
1981 }
1982 if (meth == NULL_TREE)
1983 abort ();
1984 }
1985 }
1986
1987 lookup_arg = tree_cons (NULL_TREE, dtable,
1988 tree_cons (NULL_TREE, build_class_ref (interface),
1989 build_tree_list (NULL_TREE, idx)));
1990
1991 return build (CALL_EXPR, ptr_type_node,
1992 build_address_of (soft_lookupinterfacemethod_node),
1993 lookup_arg, NULL_TREE);
1994 }
1995
1996 /* Expand one of the invoke_* opcodes.
1997 OPCODE is the specific opcode.
1998 METHOD_REF_INDEX is an index into the constant pool.
1999 NARGS is the number of arguments, or -1 if not specified. */
2000
2001 static void
2002 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2003 {
2004 tree method_signature = COMPONENT_REF_SIGNATURE(&current_jcf->cpool, method_ref_index);
2005 tree method_name = COMPONENT_REF_NAME (&current_jcf->cpool, method_ref_index);
2006 tree self_type = get_class_constant
2007 (current_jcf, COMPONENT_REF_CLASS_INDEX(&current_jcf->cpool, method_ref_index));
2008 const char *const self_name
2009 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2010 tree call, func, method, arg_list, method_type;
2011 tree check = NULL_TREE;
2012
2013 if (! CLASS_LOADED_P (self_type))
2014 {
2015 load_class (self_type, 1);
2016 safe_layout_class (self_type);
2017 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2018 fatal_error ("failed to find class '%s'", self_name);
2019 }
2020 layout_class_methods (self_type);
2021
2022 if (ID_INIT_P (method_name))
2023 method = lookup_java_constructor (self_type, method_signature);
2024 else
2025 method = lookup_java_method (self_type, method_name, method_signature);
2026 if (method == NULL_TREE)
2027 {
2028 error ("class '%s' has no method named '%s' matching signature '%s'",
2029 self_name,
2030 IDENTIFIER_POINTER (method_name),
2031 IDENTIFIER_POINTER (method_signature));
2032 }
2033 /* invokestatic may only be used on static, non-abstract methods. */
2034 else if (opcode == OPCODE_invokestatic)
2035 {
2036 if (!METHOD_STATIC (method))
2037 {
2038 error ("invokestatic on non static method");
2039 method = NULL_TREE;
2040 }
2041 else if (METHOD_ABSTRACT (method))
2042 {
2043 error ("invokestatic on abstract method");
2044 method = NULL_TREE;
2045 }
2046 }
2047 else
2048 {
2049 if (METHOD_STATIC (method))
2050 {
2051 error ("invoke[non-static] on static method");
2052 method = NULL_TREE;
2053 }
2054 }
2055
2056 if (method == NULL_TREE)
2057 {
2058 method_type = get_type_from_signature (method_signature);
2059 pop_arguments (TYPE_ARG_TYPES (method_type));
2060 if (opcode != OPCODE_invokestatic)
2061 pop_type (self_type);
2062 method_type = promote_type (TREE_TYPE (method_type));
2063 push_value (convert (method_type, integer_zero_node));
2064 return;
2065 }
2066
2067 method_type = TREE_TYPE (method);
2068 arg_list = pop_arguments (TYPE_ARG_TYPES (method_type));
2069 flush_quick_stack ();
2070
2071 func = NULL_TREE;
2072 if (opcode == OPCODE_invokestatic)
2073 func = build_known_method_ref (method, method_type, self_type,
2074 method_signature, arg_list);
2075 else if (opcode == OPCODE_invokespecial
2076 || (opcode == OPCODE_invokevirtual
2077 && (METHOD_PRIVATE (method)
2078 || METHOD_FINAL (method)
2079 || CLASS_FINAL (TYPE_NAME (self_type)))))
2080 {
2081 /* If the object for the method call is null, we throw an
2082 exception. We don't do this if the object is the current
2083 method's `this'. In other cases we just rely on an
2084 optimization pass to eliminate redundant checks. FIXME:
2085 Unfortunately there doesn't seem to be a way to determine
2086 what the current method is right now.
2087 We do omit the check if we're calling <init>. */
2088 /* We use a SAVE_EXPR here to make sure we only evaluate
2089 the new `self' expression once. */
2090 tree save_arg = save_expr (TREE_VALUE (arg_list));
2091 TREE_VALUE (arg_list) = save_arg;
2092 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2093 func = build_known_method_ref (method, method_type, self_type,
2094 method_signature, arg_list);
2095 }
2096 else
2097 {
2098 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2099 arg_list);
2100 if (opcode == OPCODE_invokevirtual)
2101 func = build_invokevirtual (dtable, method);
2102 else
2103 func = build_invokeinterface (dtable, method);
2104 }
2105 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2106
2107 call = build (CALL_EXPR, TREE_TYPE (method_type), func, arg_list, NULL_TREE);
2108 TREE_SIDE_EFFECTS (call) = 1;
2109 call = check_for_builtin (method, call);
2110
2111 if (check != NULL_TREE)
2112 {
2113 call = build (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2114 TREE_SIDE_EFFECTS (call) = 1;
2115 }
2116
2117 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2118 expand_expr_stmt (call);
2119 else
2120 {
2121 push_value (call);
2122 flush_quick_stack ();
2123 }
2124 }
2125
2126 /* Create a stub which will be put into the vtable but which will call
2127 a JNI function. */
2128
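/* A sketch of the generated stub, in terms of the soft_* runtime
   hooks referenced below:

     env = (*soft_getjnienvnewframe) (klass);
     if (meth == NULL)
       meth = (*soft_lookupjnimethod) (klass, name, signature, args_size);
     [res =] (*meth) (env, [klass,] method arguments ...);
     (*soft_jnipopsystemframe) (env);
     return [res];  */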
2129 tree
2130 build_jni_stub (tree method)
2131 {
2132 tree jnifunc, call, args, body, lookup_arg, method_sig, arg_types;
2133 tree jni_func_type, tem;
2134 tree env_var, res_var = NULL_TREE, block;
2135 tree method_args, res_type;
2136 tree meth_var;
2137
2138 int args_size = 0;
2139
2140 tree klass = DECL_CONTEXT (method);
2141 int from_class = ! CLASS_FROM_SOURCE_P (klass);
2142 klass = build_class_ref (klass);
2143
2144 if (! METHOD_NATIVE (method) || ! flag_jni)
2145 abort ();
2146
2147 DECL_ARTIFICIAL (method) = 1;
2148 DECL_EXTERNAL (method) = 0;
2149
2150 env_var = build_decl (VAR_DECL, get_identifier ("env"), ptr_type_node);
2151 DECL_CONTEXT (env_var) = method;
2152
2153 if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
2154 {
2155 res_var = build_decl (VAR_DECL, get_identifier ("res"),
2156 TREE_TYPE (TREE_TYPE (method)));
2157 DECL_CONTEXT (res_var) = method;
2158 TREE_CHAIN (env_var) = res_var;
2159 }
2160
2161 meth_var = build_decl (VAR_DECL, get_identifier ("meth"), ptr_type_node);
2162 TREE_STATIC (meth_var) = 1;
2163 TREE_PUBLIC (meth_var) = 0;
2164 DECL_EXTERNAL (meth_var) = 0;
2165 DECL_CONTEXT (meth_var) = method;
2166 DECL_ARTIFICIAL (meth_var) = 1;
2167 DECL_INITIAL (meth_var) = null_pointer_node;
2168 TREE_USED (meth_var) = 1;
2169 chainon (env_var, meth_var);
2170 layout_decl (meth_var, 0);
2171 make_decl_rtl (meth_var, NULL);
2172 rest_of_decl_compilation (meth_var, NULL, 0, 0);
2173
2174 /* One strange way the two front ends (bytecode and source) differ
2175 is in how they store method arguments. */
2176 if (from_class)
2177 method_args = DECL_ARGUMENTS (method);
2178 else
2179 method_args = BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (method));
2180 block = build_block (env_var, NULL_TREE, NULL_TREE,
2181 method_args, NULL_TREE);
2182 TREE_SIDE_EFFECTS (block) = 1;
2183 /* When compiling from source we don't set the type of the block,
2184 because that will prevent patch_return from ever being run. */
2185 if (from_class)
2186 TREE_TYPE (block) = TREE_TYPE (TREE_TYPE (method));
2187
2188 /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame. */
2189 body = build (MODIFY_EXPR, ptr_type_node, env_var,
2190 build (CALL_EXPR, ptr_type_node,
2191 build_address_of (soft_getjnienvnewframe_node),
2192 build_tree_list (NULL_TREE, klass),
2193 NULL_TREE));
2194 CAN_COMPLETE_NORMALLY (body) = 1;
2195
2196 /* All the arguments to this method become arguments to the
2197 underlying JNI function. If we had to wrap object arguments in a
2198 special way, we would do that here. */
2199 args = NULL_TREE;
2200 for (tem = method_args; tem != NULL_TREE; tem = TREE_CHAIN (tem))
2201 {
2202 int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (tem)));
2203 #ifdef PARM_BOUNDARY
2204 arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
2205 * PARM_BOUNDARY);
2206 #endif
2207 args_size += (arg_bits / BITS_PER_UNIT);
2208
2209 args = tree_cons (NULL_TREE, tem, args);
2210 }
2211 args = nreverse (args);
2212 arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
2213
2214 /* For a static method the second argument is the class. For a
2215 non-static method the second argument is `this'; that is already
2216 available in the argument list. */
2217 if (METHOD_STATIC (method))
2218 {
2219 args_size += int_size_in_bytes (TREE_TYPE (klass));
2220 args = tree_cons (NULL_TREE, klass, args);
2221 arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
2222 }
2223
2224 /* The JNIEnv structure is the first argument to the JNI function. */
2225 args_size += int_size_in_bytes (TREE_TYPE (env_var));
2226 args = tree_cons (NULL_TREE, env_var, args);
2227 arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);
2228
2229 /* We call _Jv_LookupJNIMethod to find the actual underlying
2230 function pointer. _Jv_LookupJNIMethod will throw the appropriate
2231 exception if this function is not found at runtime. */
2232 tem = build_tree_list (NULL_TREE, build_int_2 (args_size, 0));
2233 method_sig = build_java_signature (TREE_TYPE (method));
2234 lookup_arg = tree_cons (NULL_TREE,
2235 build_utf8_ref (unmangle_classname
2236 (IDENTIFIER_POINTER (method_sig),
2237 IDENTIFIER_LENGTH (method_sig))),
2238 tem);
2239 tem = DECL_NAME (method);
2240 lookup_arg
2241 = tree_cons (NULL_TREE, klass,
2242 tree_cons (NULL_TREE, build_utf8_ref (tem), lookup_arg));
2243
2244 tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);
2245
2246 #ifdef MODIFY_JNI_METHOD_CALL
2247 tem = MODIFY_JNI_METHOD_CALL (tem);
2248 #endif
2249
2250 jni_func_type = build_pointer_type (tem);
2251
2252 jnifunc = build (COND_EXPR, ptr_type_node,
2253 meth_var, meth_var,
2254 build (MODIFY_EXPR, ptr_type_node,
2255 meth_var,
2256 build (CALL_EXPR, ptr_type_node,
2257 build_address_of (soft_lookupjnimethod_node),
2258 lookup_arg, NULL_TREE)));
2259
2260 /* Now we make the actual JNI call via the resulting function
2261 pointer. */
2262 call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (method)),
2263 build1 (NOP_EXPR, jni_func_type, jnifunc),
2264 args, NULL_TREE);
2265
2266 /* If the JNI call returned a result, capture it here. If we had to
2267 unwrap JNI object results, we would do that here. */
2268 if (res_var != NULL_TREE)
2269 call = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
2270 res_var, call);
2271
2272 TREE_SIDE_EFFECTS (call) = 1;
2273 CAN_COMPLETE_NORMALLY (call) = 1;
2274
2275 body = build (COMPOUND_EXPR, void_type_node, body, call);
2276 TREE_SIDE_EFFECTS (body) = 1;
2277
2278 /* Now free the environment we allocated. */
2279 call = build (CALL_EXPR, ptr_type_node,
2280 build_address_of (soft_jnipopsystemframe_node),
2281 build_tree_list (NULL_TREE, env_var),
2282 NULL_TREE);
2283 TREE_SIDE_EFFECTS (call) = 1;
2284 CAN_COMPLETE_NORMALLY (call) = 1;
2285 body = build (COMPOUND_EXPR, void_type_node, body, call);
2286 TREE_SIDE_EFFECTS (body) = 1;
2287
2288 /* Finally, do the return. When compiling from source we rely on
2289 patch_return to patch the return value -- because DECL_RESULT is
2290 not set at the time this function is called. */
2291 if (from_class)
2292 {
2293 res_type = void_type_node;
2294 if (res_var != NULL_TREE)
2295 {
2296 tree drt;
2297 if (! DECL_RESULT (method))
2298 abort ();
2299 /* Make sure we copy the result variable to the actual
2300 result. We use the type of the DECL_RESULT because it
2301 might be different from the return type of the function:
2302 it might be promoted. */
2303 drt = TREE_TYPE (DECL_RESULT (method));
2304 if (drt != TREE_TYPE (res_var))
2305 res_var = build1 (CONVERT_EXPR, drt, res_var);
2306 res_var = build (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
2307 TREE_SIDE_EFFECTS (res_var) = 1;
2308 }
2309 }
2310 else
2311 {
2312 /* This is necessary to get patch_return to run. */
2313 res_type = NULL_TREE;
2314 }
2315 body = build (COMPOUND_EXPR, void_type_node, body,
2316 build1 (RETURN_EXPR, res_type, res_var));
2317 TREE_SIDE_EFFECTS (body) = 1;
2318
2319 BLOCK_EXPR_BODY (block) = body;
2320 return block;
2321 }
2322
2323 /* Expand an operation to extract from or store into a field.
2324 IS_STATIC is 1 iff the field is static.
2325 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2326 FIELD_REF_INDEX is an index into the constant pool. */
2327
2328 static void
2329 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2330 {
2331 tree self_type =
2332 get_class_constant (current_jcf,
2333 COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool,
2334 field_ref_index));
2335 const char *self_name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2336 tree field_name = COMPONENT_REF_NAME (&current_jcf->cpool, field_ref_index);
2337 tree field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool,
2338 field_ref_index);
2339 tree field_type = get_type_from_signature (field_signature);
2340 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2341 tree field_ref;
2342 int is_error = 0;
2343 tree field_decl = lookup_field (&self_type, field_name);
2344 if (field_decl == error_mark_node)
2345 {
2346 is_error = 1;
2347 }
2348 else if (field_decl == NULL_TREE)
2349 {
2350 error ("missing field '%s' in '%s'",
2351 IDENTIFIER_POINTER (field_name), self_name);
2352 is_error = 1;
2353 }
2354 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2355 {
2356 error ("mismatching signature for field '%s' in '%s'",
2357 IDENTIFIER_POINTER (field_name), self_name);
2358 is_error = 1;
2359 }
2360 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2361 if (is_error)
2362 {
2363 if (! is_putting)
2364 push_value (convert (field_type, integer_zero_node));
2365 flush_quick_stack ();
2366 return;
2367 }
2368
2369 field_ref = build_field_ref (field_ref, self_type, field_name);
2370 if (is_static)
2371 field_ref = build_class_init (self_type, field_ref);
2372 if (is_putting)
2373 {
2374 flush_quick_stack ();
2375 if (FIELD_FINAL (field_decl))
2376 {
2377 if (DECL_CONTEXT (field_decl) != current_class)
2378 error ("%Jassignment to final field '%D' not in field's class",
2379 field_decl, field_decl);
2380 else if (FIELD_STATIC (field_decl))
2381 {
2382 if (!DECL_CLINIT_P (current_function_decl))
2383 warning ("%Jassignment to final static field `%D' not in "
2384 "class initializer", field_decl, field_decl);
2385 }
2386 else
2387 {
2388 tree cfndecl_name = DECL_NAME (current_function_decl);
2389 if (! DECL_CONSTRUCTOR_P (current_function_decl)
2390 && !ID_FINIT_P (cfndecl_name))
2391 warning ("%Jassignment to final field '%D' not in constructor",
2392 field_decl, field_decl);
2393 }
2394 }
2395 expand_assignment (field_ref, new_value, 0);
2396 }
2397 else
2398 push_value (field_ref);
2399 }
2400
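/* Restore type_map and stack_pointer from the type state the
   verifier recorded for LABEL.  */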
2401 void
2402 load_type_state (tree label)
2403 {
2404 int i;
2405 tree vec = LABEL_TYPE_STATE (label);
2406 int cur_length = TREE_VEC_LENGTH (vec);
2407 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2408 for (i = 0; i < cur_length; i++)
2409 type_map [i] = TREE_VEC_ELT (vec, i);
2410 }
2411
2412 /* Do the expansion of a Java switch. With GCC, switches are front-end
2413 dependent constructs, but they rely on common gcc routines. This function is
2414 placed here because it uses things defined locally in parse.y. */
2415
2416 static tree
2417 case_identity (tree t __attribute__ ((__unused__)), tree v)
2418 {
2419 return v;
2420 }
2421
2422 /* Return the name of the vtable for an array of a given primitive
2423 type. */
2424 static tree
2425 get_primitive_array_vtable (tree elt)
2426 {
2427 tree r;
2428 if (elt == boolean_type_node)
2429 r = boolean_array_vtable;
2430 else if (elt == byte_type_node)
2431 r = byte_array_vtable;
2432 else if (elt == char_type_node)
2433 r = char_array_vtable;
2434 else if (elt == short_type_node)
2435 r = short_array_vtable;
2436 else if (elt == int_type_node)
2437 r = int_array_vtable;
2438 else if (elt == long_type_node)
2439 r = long_array_vtable;
2440 else if (elt == float_type_node)
2441 r = float_array_vtable;
2442 else if (elt == double_type_node)
2443 r = double_array_vtable;
2444 else
2445 abort ();
2446 return build_address_of (r);
2447 }
2448
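/* Expand the Java-specific tree codes (NEW_ARRAY_INIT, BLOCK,
   CASE_EXPR, DEFAULT_EXPR, SWITCH_EXPR, TRY_EXPR, JAVA_EXC_OBJ_EXPR
   and LABEL_EXPR) that the language-independent expander does not
   know how to handle.  */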
2449 struct rtx_def *
2450 java_expand_expr (tree exp, rtx target, enum machine_mode tmode,
2451 int modifier /* Actually an enum expand_modifier. */,
2452 rtx *alt_rtl ATTRIBUTE_UNUSED)
2453 {
2454 tree current;
2455
2456 switch (TREE_CODE (exp))
2457 {
2458 case NEW_ARRAY_INIT:
2459 {
2460 rtx tmp;
2461 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2462 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2463 tree data_fld = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (array_type)));
2464 HOST_WIDE_INT ilength = java_array_type_length (array_type);
2465 tree length = build_int_2 (ilength, 0);
2466 tree init = TREE_OPERAND (exp, 0);
2467 tree array_decl;
2468
2469 /* See if we can generate the array statically. */
2470 if (TREE_CONSTANT (init) && TREE_STATIC (exp)
2471 && JPRIMITIVE_TYPE_P (element_type))
2472 {
2473 tree temp, value, init_decl;
2474 struct rtx_def *r;
2475 START_RECORD_CONSTRUCTOR (temp, object_type_node);
2476 PUSH_FIELD_VALUE (temp, "vtable",
2477 get_primitive_array_vtable (element_type));
2478 if (! flag_hash_synchronization)
2479 PUSH_FIELD_VALUE (temp, "sync_info", null_pointer_node);
2480 FINISH_RECORD_CONSTRUCTOR (temp);
2481 START_RECORD_CONSTRUCTOR (value, array_type);
2482 PUSH_SUPER_VALUE (value, temp);
2483 PUSH_FIELD_VALUE (value, "length", length);
2484 PUSH_FIELD_VALUE (value, "data", init);
2485 FINISH_RECORD_CONSTRUCTOR (value);
2486
2487 init_decl = build_decl (VAR_DECL, generate_name (), array_type);
2488 pushdecl_top_level (init_decl);
2489 TREE_STATIC (init_decl) = 1;
2490 DECL_INITIAL (init_decl) = value;
2491 DECL_IGNORED_P (init_decl) = 1;
2492 TREE_READONLY (init_decl) = 1;
2493 /* Hash synchronization requires at least 64-bit alignment. */
2494 if (flag_hash_synchronization && POINTER_SIZE < 64)
2495 DECL_ALIGN (init_decl) = 64;
2496 rest_of_decl_compilation (init_decl, NULL, 1, 0);
2497 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1;
2498 init = build1 (ADDR_EXPR, TREE_TYPE (exp), init_decl);
2499 r = expand_expr (init, target, tmode, modifier);
2500 return r;
2501 }
2502
2503 array_decl = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2504 expand_decl (array_decl);
2505 tmp = expand_assignment (array_decl,
2506 build_new_array (element_type, length),
2507 1);
2508 if (TREE_CONSTANT (init)
2509 && ilength >= 10 && JPRIMITIVE_TYPE_P (element_type))
2510 {
2511 tree init_decl;
2512 init_decl = build_decl (VAR_DECL, generate_name (),
2513 TREE_TYPE (init));
2514 pushdecl_top_level (init_decl);
2515 TREE_STATIC (init_decl) = 1;
2516 DECL_INITIAL (init_decl) = init;
2517 DECL_IGNORED_P (init_decl) = 1;
2518 TREE_READONLY (init_decl) = 1;
2519 rest_of_decl_compilation (init_decl, NULL, 1, 0);
2520 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (init_decl)) = 1;
2521 init = init_decl;
2522 }
2523 expand_assignment (build (COMPONENT_REF, TREE_TYPE (data_fld),
2524 build_java_indirect_ref (array_type,
2525 array_decl, flag_check_references),
2526 data_fld), init, 0);
2527 return tmp;
2528 }
2529 case BLOCK:
2530 if (BLOCK_EXPR_BODY (exp))
2531 {
2532 tree local;
2533 rtx last;
2534 tree body = BLOCK_EXPR_BODY (exp);
2535 /* Incremented for each static class
2536 initialization flag found among the block's locals. */
2537 int found_class_initialization_flag = 0;
2538
2539 pushlevel (2); /* 2 and above */
2540 expand_start_bindings (0);
2541 local = BLOCK_EXPR_DECLS (exp);
2542 while (local)
2543 {
2544 tree next = TREE_CHAIN (local);
2545 found_class_initialization_flag +=
2546 LOCAL_CLASS_INITIALIZATION_FLAG_P (local);
2547 layout_decl (local, 0);
2548 expand_decl (pushdecl (local));
2549 local = next;
2550 }
2551
2552 /* Emit initialization code for test flags if we saw one. */
2553 if (! always_initialize_class_p
2554 && current_function_decl
2555 && found_class_initialization_flag)
2556 htab_traverse
2557 (DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl),
2558 emit_init_test_initialization, NULL);
2559
2560 /* Avoid deep recursion for long blocks. */
2561 while (TREE_CODE (body) == COMPOUND_EXPR)
2562 {
2563 expand_expr (TREE_OPERAND (body, 0), const0_rtx, VOIDmode, 0);
2564 emit_queue ();
2565 body = TREE_OPERAND (body, 1);
2566 }
2567 last = expand_expr (body, NULL_RTX, VOIDmode, 0);
2568 emit_queue ();
2569 expand_end_bindings (getdecls (), 1, 0);
2570 poplevel (1, 1, 0);
2571 return last;
2572 }
2573 return const0_rtx;
2574
2575 case CASE_EXPR:
2576 {
2577 tree duplicate;
2578 if (pushcase (TREE_OPERAND (exp, 0), case_identity,
2579 build_decl (LABEL_DECL, NULL_TREE, NULL_TREE),
2580 &duplicate) == 2)
2581 {
2582 EXPR_WFL_LINECOL (wfl_operator) = EXPR_WFL_LINECOL (exp);
2583 parse_error_context
2584 (wfl_operator, "Duplicate case label: `%s'",
2585 print_int_node (TREE_OPERAND (exp, 0)));
2586 }
2587 return const0_rtx;
2588 }
2589
2590 case DEFAULT_EXPR:
2591 pushcase (NULL_TREE, 0,
2592 build_decl (LABEL_DECL, NULL_TREE, NULL_TREE), NULL);
2593 return const0_rtx;
2594
2595 case SWITCH_EXPR:
2596 expand_start_case (0, TREE_OPERAND (exp, 0), int_type_node, "switch");
2597 expand_expr_stmt (TREE_OPERAND (exp, 1));
2598 expand_end_case (TREE_OPERAND (exp, 0));
2599 return const0_rtx;
2600
2601 case TRY_EXPR:
2602 /* We expand a try[-catch] block */
2603
2604 /* Expand the try block */
2605 expand_eh_region_start ();
2606 expand_expr_stmt (TREE_OPERAND (exp, 0));
2607 expand_start_all_catch ();
2608
2609 /* Expand all catch clauses (EH handlers) */
2610 for (current = TREE_OPERAND (exp, 1); current;
2611 current = TREE_CHAIN (current))
2612 {
2613 tree catch = TREE_OPERAND (current, 0);
2614 tree decl = BLOCK_EXPR_DECLS (catch);
2615 tree type = (decl ? TREE_TYPE (TREE_TYPE (decl)) : NULL_TREE);
2616
2617 expand_start_catch (prepare_eh_table_type (type));
2618 expand_expr_stmt (TREE_OPERAND (current, 0));
2619 expand_end_catch ();
2620 }
2621 expand_end_all_catch ();
2622 return const0_rtx;
2623
2624 case JAVA_EXC_OBJ_EXPR:
2625 return expand_expr (build_exception_object_ref (TREE_TYPE (exp)),
2626 target, tmode, modifier);
2627
2628 case LABEL_EXPR:
2629 /* Used only by expanded inline functions. */
2630 expand_label (TREE_OPERAND (exp, 0));
2631 return const0_rtx;
2632
2633 default:
2634 internal_error ("can't expand %s", tree_code_name [TREE_CODE (exp)]);
2635 }
2636 }
2637
2638 /* Go over METHOD's bytecode and note instruction starts in
2639 instruction_bits[]. */
2640
2641 void
2642 note_instructions (JCF *jcf, tree method)
2643 {
2644 int PC;
2645 unsigned char* byte_ops;
2646 long length = DECL_CODE_LENGTH (method);
2647
2648 int saw_index;
2649 jint INT_temp;
2650
2651 #undef RET /* Defined by config/i386/i386.h */
2652 #undef PTR
2653 #define BCODE byte_ops
2654 #define BYTE_type_node byte_type_node
2655 #define SHORT_type_node short_type_node
2656 #define INT_type_node int_type_node
2657 #define LONG_type_node long_type_node
2658 #define CHAR_type_node char_type_node
2659 #define PTR_type_node ptr_type_node
2660 #define FLOAT_type_node float_type_node
2661 #define DOUBLE_type_node double_type_node
2662 #define VOID_type_node void_type_node
2663 #define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2664 #define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2665 #define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
2666 #define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
2667
2668 #define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */
2669
2670 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
2671 byte_ops = jcf->read_ptr;
2672 instruction_bits = xrealloc (instruction_bits, length + 1);
2673 memset (instruction_bits, 0, length + 1);
2674
2675 /* This pass figures out which PCs can be the targets of jumps. */
2676 for (PC = 0; PC < length;)
2677 {
2678 int oldpc = PC; /* PC at instruction start. */
2679 instruction_bits [PC] |= BCODE_INSTRUCTION_START;
2680 switch (byte_ops[PC++])
2681 {
2682 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
2683 case OPCODE: \
2684 PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
2685 break;
2686
2687 #define NOTE_LABEL(PC) note_label(oldpc, PC)
2688
2689 #define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2690 #define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2691 #define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
2692 #define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2693 #define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2694 #define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2695 #define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2696 #define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2697
2698 #define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
2699 PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
2700 #define PRE_SPECIAL_IINC(OPERAND_TYPE) \
2701 ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
2702 #define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
2703 #define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
2704 #define PRE_SPECIAL_THROW(IGNORE) /* nothing */
2705 #define PRE_SPECIAL_BREAK(IGNORE) /* nothing */
2706
2707 /* two forms of wide instructions */
2708 #define PRE_SPECIAL_WIDE(IGNORE) \
2709 { \
2710 int modified_opcode = IMMEDIATE_u1; \
2711 if (modified_opcode == OPCODE_iinc) \
2712 { \
2713 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
2714 (void) IMMEDIATE_s2; /* constbyte1 and constbyte2 */ \
2715 } \
2716 else \
2717 { \
2718 (void) IMMEDIATE_u2; /* indexbyte1 and indexbyte2 */ \
2719 } \
2720 }
2721
2722 #define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */
2723
2724 #define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2725
2726 #define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
2727 #define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
2728 PRE_ARRAY_##SUBOP(OPERAND_TYPE)
2729 #define PRE_ARRAY_LOAD(TYPE) /* nothing */
2730 #define PRE_ARRAY_STORE(TYPE) /* nothing */
2731 #define PRE_ARRAY_LENGTH(TYPE) /* nothing */
2732 #define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
2733 #define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
2734 #define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
2735 #define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)
2736
2737 #define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
2738 #define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
2739 #define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
2740 saw_index = 0; INT_temp = (OPERAND_VALUE); \
2741 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
2742 #define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
2743 saw_index = 0; INT_temp = (OPERAND_VALUE); \
2744 NOTE_LABEL (PC); \
2745 if (!saw_index) NOTE_LABEL(oldpc + INT_temp);
2746
2747 #define PRE_RET(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE)
2748
2749 #define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
2750 PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH
2751
2752 #define PRE_LOOKUP_SWITCH \
2753 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
2754 NOTE_LABEL (default_offset+oldpc); \
2755 if (npairs >= 0) \
2756 while (--npairs >= 0) { \
2757 jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
2758 jint offset = IMMEDIATE_s4; \
2759 NOTE_LABEL (offset+oldpc); } \
2760 }
2761
2762 #define PRE_TABLE_SWITCH \
2763 { jint default_offset = IMMEDIATE_s4; \
2764 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
2765 NOTE_LABEL (default_offset+oldpc); \
2766 if (low <= high) \
2767 while (low++ <= high) { \
2768 jint offset = IMMEDIATE_s4; \
2769 NOTE_LABEL (offset+oldpc); } \
2770 }
2771
2772 #define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
2773 #define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
2774 #define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
2775 (void)(IMMEDIATE_u2); \
2776 PC += 2 * IS_INTERFACE /* for invokeinterface */;
2777
2778 #include "javaop.def"
2779 #undef JAVAOP
2780 }
2781 } /* for */
2782 }
2783
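/* Expand the bytecode of METHOD, read from JCF, into RTL.  The code
   is verified first; bytecodes the verifier found unreachable are
   replaced with nops so that exception ranges are still emitted
   correctly.  */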
2784 void
2785 expand_byte_code (JCF *jcf, tree method)
2786 {
2787 int PC;
2788 int i;
2789 const unsigned char *linenumber_pointer;
2790 int dead_code_index = -1;
2791 unsigned char* byte_ops;
2792 long length = DECL_CODE_LENGTH (method);
2793
2794 stack_pointer = 0;
2795 JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
2796 byte_ops = jcf->read_ptr;
2797
2798 /* We make an initial pass over the line number table, to note
2799 which instructions have associated line number entries. */
2800 linenumber_pointer = linenumber_table;
2801 for (i = 0; i < linenumber_count; i++)
2802 {
2803 int pc = GET_u2 (linenumber_pointer);
2804 linenumber_pointer += 4;
2805 if (pc >= length)
2806 warning ("invalid PC in line number table");
2807 else
2808 {
2809 if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
2810 instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
2811 instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
2812 }
2813 }
2814
2815 if (! verify_jvm_instructions (jcf, byte_ops, length))
2816 return;
2817
2818 /* Translate bytecodes to rtl instructions. */
2819 linenumber_pointer = linenumber_table;
2820 for (PC = 0; PC < length;)
2821 {
2822 if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
2823 {
2824 tree label = lookup_label (PC);
2825 flush_quick_stack ();
2826 if ((instruction_bits [PC] & BCODE_TARGET) != 0)
2827 expand_label (label);
2828 if (LABEL_VERIFIED (label) || PC == 0)
2829 load_type_state (label);
2830 }
2831
2832 if (! (instruction_bits [PC] & BCODE_VERIFIED))
2833 {
2834 if (dead_code_index == -1)
2835 {
2836 /* This is the start of a region of unreachable bytecodes.
2837 They still need to be processed in order for EH ranges
2838 to get handled correctly. However, we can simply
2839 replace these bytecodes with nops. */
2840 dead_code_index = PC;
2841 }
2842
2843 /* Turn this bytecode into a nop. */
2844 byte_ops[PC] = 0x0;
2845 }
2846 else
2847 {
2848 if (dead_code_index != -1)
2849 {
2850 /* We've just reached the end of a region of dead code. */
2851 if (extra_warnings)
2852 warning ("unreachable bytecode from %d to before %d",
2853 dead_code_index, PC);
2854 dead_code_index = -1;
2855 }
2856 }
2857
2858 /* Handle possible line number entry for this PC.
2859
2860 This code handles out-of-order and multiple linenumbers per PC,
2861 but is optimized for the case of line numbers increasing
2862 monotonically with PC. */
2863 if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
2864 {
2865 if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
2866 || GET_u2 (linenumber_pointer) != PC)
2867 linenumber_pointer = linenumber_table;
2868 while (linenumber_pointer < linenumber_table + linenumber_count * 4)
2869 {
2870 int pc = GET_u2 (linenumber_pointer);
2871 linenumber_pointer += 4;
2872 if (pc == PC)
2873 {
2874 input_line = GET_u2 (linenumber_pointer - 2);
2875 emit_line_note (input_location);
2876 if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
2877 break;
2878 }
2879 }
2880 }
2881 maybe_pushlevels (PC);
2882 PC = process_jvm_instruction (PC, byte_ops, length);
2883 maybe_poplevels (PC);
2884 } /* for */
2885
2886 if (dead_code_index != -1)
2887 {
2888 /* We've just reached the end of a region of dead code. */
2889 if (extra_warnings)
2890 warning ("unreachable bytecode from %d to the end of the method",
2891 dead_code_index);
2892 }
2893 }
2894
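/* Push the constant at INDEX in JCF's constant pool onto the quick
   stack.  String constants are pushed as references to the
   corresponding java.lang.String; other constants are pushed by
   value.  */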
2895 static void
2896 java_push_constant_from_pool (JCF *jcf, int index)
2897 {
2898 tree c;
2899 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
2900 {
2901 tree name;
2902 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
2903 index = alloc_name_constant (CONSTANT_String, name);
2904 c = build_ref_from_constant_pool (index);
2905 TREE_TYPE (c) = promote_type (string_type_node);
2906 }
2907 else
2908 c = get_constant (jcf, index);
2909 push_value (c);
2910 }
2911
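/* Expand the single bytecode instruction starting at PC in BYTE_OPS
   and return the PC of the instruction that follows it.  */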
2912 int
2913 process_jvm_instruction (int PC, const unsigned char* byte_ops,
2914 long length ATTRIBUTE_UNUSED)
2915 {
2916 const char *opname; /* Temporary ??? */
2917 int oldpc = PC; /* PC at instruction start. */
2918
2919 /* If the instruction is at the beginning of an exception handler,
2920 replace the top of the stack with the thrown object reference. */
2921 if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
2922 {
2923 tree type = pop_type (ptr_type_node);
2924 push_value (build (JAVA_EXC_OBJ_EXPR, type));
2925 }
2926
2927 switch (byte_ops[PC++])
2928 {
2929 #define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
2930 case OPCODE: \
2931 opname = #OPNAME; \
2932 OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
2933 break;
2934
2935 #define RET(OPERAND_TYPE, OPERAND_VALUE) \
2936 { \
2937 int saw_index = 0; \
2938 int index = OPERAND_VALUE; \
2939 build_java_ret (find_local_variable (index, ptr_type_node, oldpc)); \
2940 }
2941
2942 #define JSR(OPERAND_TYPE, OPERAND_VALUE) \
2943 { \
2944 /* OPERAND_VALUE may have side-effects on PC */ \
2945 int opvalue = OPERAND_VALUE; \
2946 build_java_jsr (oldpc + opvalue, PC); \
2947 }
2948
2949 /* Push a constant onto the stack. */
2950 #define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
2951 { int saw_index = 0; int ival = (OPERAND_VALUE); \
2952 if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
2953 else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }
2954
2955 /* Internal macro added for use by the WIDE case. */
2956 #define LOAD_INTERNAL(OPTYPE, OPVALUE) \
2957 expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);
2958
2959 /* Push local variable onto the opcode stack. */
2960 #define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
2961 { \
2962 /* have to do this since OPERAND_VALUE may have side-effects */ \
2963 int opvalue = OPERAND_VALUE; \
2964 LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
2965 }
2966
2967 #define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
2968 expand_java_return (OPERAND_TYPE##_type_node)
2969
2970 #define REM_EXPR TRUNC_MOD_EXPR
2971 #define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
2972 expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)
2973
2974 #define FIELD(IS_STATIC, IS_PUT) \
2975 expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)
2976
2977 #define TEST(OPERAND_TYPE, CONDITION) \
2978 expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
2979
2980 #define COND(OPERAND_TYPE, CONDITION) \
2981 expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)
2982
2983 #define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
2984 BRANCH_##OPERAND_TYPE (OPERAND_VALUE)
2985
2986 #define BRANCH_GOTO(OPERAND_VALUE) \
2987 expand_java_goto (oldpc + OPERAND_VALUE)
2988
2989 #define BRANCH_CALL(OPERAND_VALUE) \
2990 expand_java_call (oldpc + OPERAND_VALUE, oldpc)
2991
2992 #if 0
2993 #define BRANCH_RETURN(OPERAND_VALUE) \
2994 { \
2995 tree type = OPERAND_TYPE##_type_node; \
2996 tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
2997 expand_java_ret (value); \
2998 }
2999 #endif
3000
3001 #define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
3002 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3003 fprintf (stderr, "(not implemented)\n")
3004 #define NOT_IMPL1(OPERAND_VALUE) \
3005 fprintf (stderr, "%3d: %s ", oldpc, opname); \
3006 fprintf (stderr, "(not implemented)\n")
3007
3008 #define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)
3009
3010 #define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)
3011
3012 #define STACK_POP(COUNT) java_stack_pop (COUNT)
3013
3014 #define STACK_SWAP(COUNT) java_stack_swap()
3015
3016 #define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
3017 #define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
3018 #define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)
3019
3020 #define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
3021 PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH
3022
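/* lookupswitch: read the default offset and NPAIRS (match, offset)
   pairs, emitting one case label and goto per pair plus a default
   case.  TABLE_SWITCH below is analogous, over the dense range
   [low, high].  */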
3023 #define LOOKUP_SWITCH \
3024 { jint default_offset = IMMEDIATE_s4; jint npairs = IMMEDIATE_s4; \
3025 tree selector = pop_value (INT_type_node); \
3026 tree duplicate, label; \
3027 tree type = TREE_TYPE (selector); \
3028 flush_quick_stack (); \
3029 expand_start_case (0, selector, type, "switch statement");\
3030 while (--npairs >= 0) \
3031 { \
3032 jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
3033 tree value = build_int_2 (match, match < 0 ? -1 : 0); \
3034 TREE_TYPE (value) = type; \
3035 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
3036 pushcase (value, convert, label, &duplicate); \
3037 expand_java_goto (oldpc + offset); \
3038 } \
3039 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
3040 pushcase (NULL_TREE, 0, label, &duplicate); \
3041 expand_java_goto (oldpc + default_offset); \
3042 expand_end_case (selector); \
3043 }
3044
3045 #define TABLE_SWITCH \
3046 { jint default_offset = IMMEDIATE_s4; \
3047 jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
3048 tree selector = pop_value (INT_type_node); \
3049 tree duplicate, label; \
3050 tree type = TREE_TYPE (selector); \
3051 flush_quick_stack (); \
3052 expand_start_case (0, selector, type, "switch statement");\
3053 for (; low <= high; low++) \
3054 { \
3055 jint offset = IMMEDIATE_s4; \
3056 tree value = build_int_2 (low, low < 0 ? -1 : 0); \
3057 TREE_TYPE (value) = type; \
3058 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
3059 pushcase (value, convert, label, &duplicate); \
3060 expand_java_goto (oldpc + offset); \
3061 } \
3062 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); \
3063 pushcase (NULL_TREE, 0, label, &duplicate); \
3064 expand_java_goto (oldpc + default_offset); \
3065 expand_end_case (selector); \
3066 }
3067
3068 #define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
3069 { int opcode = byte_ops[PC-1]; \
3070 int method_ref_index = IMMEDIATE_u2; \
3071 int nargs; \
3072 if (IS_INTERFACE) { nargs = IMMEDIATE_u1; (void) IMMEDIATE_u1; } \
3073 else nargs = -1; \
3074 expand_invoke (opcode, method_ref_index, nargs); \
3075 }
3076
3077 /* Handle new, checkcast, instanceof */
3078 #define OBJECT(TYPE, OP) \
3079 expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))
3080
3081 #define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)
3082
3083 #define ARRAY_LOAD(OPERAND_TYPE) \
3084 { \
3085 expand_java_arrayload( OPERAND_TYPE##_type_node ); \
3086 }
3087
3088 #define ARRAY_STORE(OPERAND_TYPE) \
3089 { \
3090 expand_java_arraystore( OPERAND_TYPE##_type_node ); \
3091 }
3092
3093 #define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
3094 #define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
3095 #define ARRAY_NEW_PTR() \
3096 push_value (build_anewarray (get_class_constant (current_jcf, \
3097 IMMEDIATE_u2), \
3098 pop_value (int_type_node)));
3099 #define ARRAY_NEW_NUM() \
3100 { \
3101 int atype = IMMEDIATE_u1; \
3102 push_value (build_newarray (atype, pop_value (int_type_node)));\
3103 }
3104 #define ARRAY_NEW_MULTI() \
3105 { \
3106 tree class = get_class_constant (current_jcf, IMMEDIATE_u2 ); \
3107 int ndims = IMMEDIATE_u1; \
3108 expand_java_multianewarray( class, ndims ); \
3109 }
3110
3111 #define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
3112 push_value (fold (build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
3113 pop_value (OPERAND_TYPE##_type_node))));
3114
3115 #define CONVERT2(FROM_TYPE, TO_TYPE) \
3116 { \
3117 push_value (build1 (NOP_EXPR, int_type_node, \
3118 (convert (TO_TYPE##_type_node, \
3119 pop_value (FROM_TYPE##_type_node))))); \
3120 }
3121
3122 #define CONVERT(FROM_TYPE, TO_TYPE) \
3123 { \
3124 push_value (convert (TO_TYPE##_type_node, \
3125 pop_value (FROM_TYPE##_type_node))); \
3126 }
3127
3128 /* Internal macro added for use by the WIDE case.
3129 Added TREE_TYPE (decl) assignment -- apbianco. */
3130 #define STORE_INTERNAL(OPTYPE, OPVALUE) \
3131 { \
3132 tree decl, value; \
3133 int var = OPVALUE; \
3134 tree type = OPTYPE; \
3135 value = pop_value (type); \
3136 type = TREE_TYPE (value); \
3137 decl = find_local_variable (var, type, oldpc); \
3138 set_local_type (var, type ); \
3139 expand_assignment (decl, value, 0); \
3140 }
3141
3142 #define STORE(OPERAND_TYPE, OPERAND_VALUE) \
3143 { \
3144 /* have to do this since OPERAND_VALUE may have side-effects */ \
3145 int opvalue = OPERAND_VALUE; \
3146 STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
3147 }
3148
3149 #define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
3150 SPECIAL_##INSTRUCTION(OPERAND_TYPE)
3151
3152 #define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
3153 #define SPECIAL_EXIT(IGNORED) MONITOR_OPERATION (soft_monitorexit_node)
3154
3155 #define MONITOR_OPERATION(call) \
3156 { \
3157 tree o = pop_value (ptr_type_node); \
3158 tree c; \
3159 flush_quick_stack (); \
3160 c = build_java_monitor (call, o); \
3161 TREE_SIDE_EFFECTS (c) = 1; \
3162 expand_expr_stmt (c); \
3163 }
3164
3165 #define SPECIAL_IINC(IGNORED) \
3166 { \
3167 unsigned int local_var_index = IMMEDIATE_u1; \
3168 int ival = IMMEDIATE_s1; \
3169 expand_iinc(local_var_index, ival, oldpc); \
3170 }
3171
3172 #define SPECIAL_WIDE(IGNORED) \
3173 { \
3174 int modified_opcode = IMMEDIATE_u1; \
3175 unsigned int local_var_index = IMMEDIATE_u2; \
3176 switch (modified_opcode) \
3177 { \
3178 case OPCODE_iinc: \
3179 { \
3180 int ival = IMMEDIATE_s2; \
3181 expand_iinc (local_var_index, ival, oldpc); \
3182 break; \
3183 } \
3184 case OPCODE_iload: \
3185 case OPCODE_lload: \
3186 case OPCODE_fload: \
3187 case OPCODE_dload: \
3188 case OPCODE_aload: \
3189 { \
3190 /* duplicate code from LOAD macro */ \
3191 LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
3192 break; \
3193 } \
3194 case OPCODE_istore: \
3195 case OPCODE_lstore: \
3196 case OPCODE_fstore: \
3197 case OPCODE_dstore: \
3198 case OPCODE_astore: \
3199 { \
3200 STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
3201 break; \
3202 } \
3203 default: \
3204 error ("unrecognized wide sub-instruction"); \
3205 } \
3206 }
3207
3208 #define SPECIAL_THROW(IGNORED) \
3209 build_java_athrow (pop_value (throwable_type_node))
3210
3211 #define SPECIAL_BREAK NOT_IMPL1
3212 #define IMPL NOT_IMPL
3213
3214 #include "javaop.def"
3215 #undef JAVAOP
3216 default:
3217 fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
3218 }
3219 return PC;
3220 }
3221
3222 /* Return the opcode at PC in the code section pointed to by
3223 CODE_OFFSET. */
3224
3225 static unsigned char
3226 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3227 {
3228 unsigned char opcode;
3229 long absolute_offset = (long)JCF_TELL (jcf);
3230
3231 JCF_SEEK (jcf, code_offset);
3232 opcode = jcf->read_ptr [pc];
3233 JCF_SEEK (jcf, absolute_offset);
3234 return opcode;
3235 }
3236
3237 /* Some bytecode compilers are emitting accurate LocalVariableTable
3238 attributes. Here's an example:
3239
3240 PC <t>store_<n>
3241 PC+1 ...
3242
3243 Attribute "LocalVariableTable"
3244 slot #<n>: ... (PC: PC+1 length: L)
3245
3246 This is accurate because the local in slot <n> really exists after
3247 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3248
3249 This procedure recognizes this situation and extends the live range
3250 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3251 length of the store instruction).
3252
3253 This function is used by `give_name_to_locals' so that a local's
3254 DECL features a DECL_LOCAL_START_PC such that the first related
3255 store operation will use DECL as a destination, not an unrelated
3256 temporary created for the occasion.
3257
3258 This function uses a global (instruction_bits) that `note_instructions'
3259 should have allocated and filled properly. */
3260
3261 int
3262 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3263 int start_pc, int slot)
3264 {
3265 int first, index, opcode;
3266 int pc, insn_pc;
3267 int wide_found = 0;
3268
3269 if (!start_pc)
3270 return start_pc;
3271
3272 first = index = -1;
3273
3274 /* Find last previous instruction and remember it */
3275 for (pc = start_pc-1; pc; pc--)
3276 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3277 break;
3278 insn_pc = pc;
3279
3280 /* Retrieve the instruction, handle `wide'. */
3281 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3282 if (opcode == OPCODE_wide)
3283 {
3284 wide_found = 1;
3285 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3286 }
3287
3288 switch (opcode)
3289 {
3290 case OPCODE_astore_0:
3291 case OPCODE_astore_1:
3292 case OPCODE_astore_2:
3293 case OPCODE_astore_3:
3294 first = OPCODE_astore_0;
3295 break;
3296
3297 case OPCODE_istore_0:
3298 case OPCODE_istore_1:
3299 case OPCODE_istore_2:
3300 case OPCODE_istore_3:
3301 first = OPCODE_istore_0;
3302 break;
3303
3304 case OPCODE_lstore_0:
3305 case OPCODE_lstore_1:
3306 case OPCODE_lstore_2:
3307 case OPCODE_lstore_3:
3308 first = OPCODE_lstore_0;
3309 break;
3310
3311 case OPCODE_fstore_0:
3312 case OPCODE_fstore_1:
3313 case OPCODE_fstore_2:
3314 case OPCODE_fstore_3:
3315 first = OPCODE_fstore_0;
3316 break;
3317
3318 case OPCODE_dstore_0:
3319 case OPCODE_dstore_1:
3320 case OPCODE_dstore_2:
3321 case OPCODE_dstore_3:
3322 first = OPCODE_dstore_0;
3323 break;
3324
3325 case OPCODE_astore:
3326 case OPCODE_istore:
3327 case OPCODE_lstore:
3328 case OPCODE_fstore:
3329 case OPCODE_dstore:
3330 index = peek_opcode_at_pc (jcf, code_offset, pc);
3331 if (wide_found)
3332 {
3333 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3334 index = (other << 8) + index;
3335 }
3336 break;
3337 }
3338
3339 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3340 means we have a <t>store. */
3341 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3342 start_pc = insn_pc;
3343
3344 return start_pc;
3345 }
3346
3347 /* Force the (direct) sub-operands of NODE to be evaluated in left-to-right
3348 order, as specified by the Java Language Specification.
3349
3350 The problem is that while expand_expr will evaluate its sub-operands in
3351 left-to-right order, for variables it will just return an rtx (i.e.
3352 an lvalue) for the variable (rather than an rvalue). So it is possible
3353 that a later sub-operand will change the register, and when the
3354 actual operation is done, it will use the new value, when it should
3355 have used the original value.
3356
3357 We fix this by using save_expr. This forces the sub-operand to be
3358 copied into a fresh virtual register.
3359
3360 For method invocations, we modify the arguments so that a
3361 left-to-right evaluation order is enforced. Saved expressions
3362 will, in CALL_EXPR order, be reused when the call is expanded.
3363 */
3364
3365 tree
3366 force_evaluation_order (tree node)
3367 {
3368 if (flag_syntax_only)
3369 return node;
3370 if (TREE_CODE (node) == CALL_EXPR
3371 || TREE_CODE (node) == NEW_CLASS_EXPR
3372 || (TREE_CODE (node) == COMPOUND_EXPR
3373 && TREE_CODE (TREE_OPERAND (node, 0)) == CALL_EXPR
3374 && TREE_CODE (TREE_OPERAND (node, 1)) == SAVE_EXPR))
3375 {
3376 tree arg, cmp;
3377
3378 if (!TREE_OPERAND (node, 1))
3379 return node;
3380
3381 arg = node;
3382
3383 /* Position arg properly, account for wrapped around ctors. */
3384 if (TREE_CODE (node) == COMPOUND_EXPR)
3385 arg = TREE_OPERAND (node, 0);
3386
3387 arg = TREE_OPERAND (arg, 1);
3388
3389 /* Not having a list of argument here is an error. */
3390 if (TREE_CODE (arg) != TREE_LIST)
3391 abort ();
3392
3393 /* This reverses the evaluation order. This is a desired effect. */
3394 for (cmp = NULL_TREE; arg; arg = TREE_CHAIN (arg))
3395 {
3396 tree saved = save_expr (force_evaluation_order (TREE_VALUE (arg)));
3397 cmp = (cmp == NULL_TREE ? saved :
3398 build (COMPOUND_EXPR, void_type_node, cmp, saved));
3399 TREE_VALUE (arg) = saved;
3400 }
3401
3402 if (cmp && TREE_CODE (cmp) == COMPOUND_EXPR)
3403 TREE_SIDE_EFFECTS (cmp) = 1;
3404
3405 if (cmp)
3406 {
3407 cmp = save_expr (build (COMPOUND_EXPR, TREE_TYPE (node), cmp, node));
3408 CAN_COMPLETE_NORMALLY (cmp) = CAN_COMPLETE_NORMALLY (node);
3409 TREE_SIDE_EFFECTS (cmp) = 1;
3410 node = cmp;
3411 }
3412 }
3413 return node;
3414 }
3415
3416 /* Called for every element in DECL_FUNCTION_INIT_TEST_TABLE of a
3417 method in order to emit initialization code for each test flag. */
3418
3419 static int
3420 emit_init_test_initialization (void **entry, void *x ATTRIBUTE_UNUSED)
3421 {
3422 struct treetreehash_entry *ite = (struct treetreehash_entry *) *entry;
3423 tree klass = build_class_ref (ite->key);
3424 tree rhs;
3425
3426 /* If the DECL_INITIAL of the test flag is set to true, it
3427 means that the class is already initialized by the time it
3428 is used. */
3429 if (DECL_INITIAL (ite->value) == boolean_true_node)
3430 rhs = boolean_true_node;
3431 /* Otherwise, we initialize the class init check variable by looking
3432 at the `state' field of the class to see if it is already
3433 initialized. This makes things a bit faster if the class is
3434 already initialized, which should be the common case. */
3435 else
3436 rhs = build (GE_EXPR, boolean_type_node,
3437 build (COMPONENT_REF, byte_type_node,
3438 build1 (INDIRECT_REF, class_type_node, klass),
3439 lookup_field (&class_type_node,
3440 get_identifier ("state"))),
3441 build_int_2 (JV_STATE_DONE, 0));
3442
3443 expand_expr_stmt (build (MODIFY_EXPR, boolean_type_node,
3444 ite->value, rhs));
3445 return true;
3446 }
3447
3448 #include "gt-java-expr.h"
3449