1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "real.h"
29 #include "java-tree.h"
30 #include "obstack.h"
31 #undef AND
32 #include "rtl.h"
33 #include "flags.h"
34 #include "java-opcodes.h"
35 #include "parse.h" /* for BLOCK_EXPR_BODY */
36 #include "buffer.h"
37 #include "toplev.h"
38 #include "ggc.h"
39
40 #ifndef DIR_SEPARATOR
41 #define DIR_SEPARATOR '/'
42 #endif
43
44 extern struct obstack temporary_obstack;
45
46 /* Base directory in which `.class' files should be written.
47 NULL means to put the file into the same directory as the
48 corresponding .java file. */
49 char *jcf_write_base_directory = NULL;
50
51 /* Make sure bytecode.data is big enough for at least N more bytes. */
52
53 #define RESERVE(N) \
54 do { CHECK_OP(state); \
55 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
56 buffer_grow (&state->bytecode, N); } while (0)
57
58 /* Add a 1-byte instruction/operand I to bytecode.data,
59 assuming space has already been RESERVE'd. */
60
61 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62
63 /* Like OP1, but I is a 2-byte big endian integer. */
64
65 #define OP2(I) \
66 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
67
68 /* Like OP1, but I is a 4-byte big endian integer. */
69
70 #define OP4(I) \
71 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
72 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73
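/* Typical usage, as in push_constant2 below:
     RESERVE (3);
     OP1 (OPCODE_ldc2_w);
     OP2 (index);
   i.e. reserve the worst-case number of bytes first, then emit them. */
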
74 /* Macro to call each time we push I words on the JVM stack. */
75
76 #define NOTE_PUSH(I) \
77 do { state->code_SP += (I); \
78 if (state->code_SP > state->code_SP_max) \
79 state->code_SP_max = state->code_SP; } while (0)
80
81 /* Macro to call each time we pop I words from the JVM stack. */
82
83 #define NOTE_POP(I) \
84 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
85
86 /* A chunk or segment of a .class file. */
87
88 struct chunk
89 {
90 /* The next segment of this .class file. */
91 struct chunk *next;
92
93 /* The actual data in this segment to be written to the .class file. */
94 unsigned char *data;
95
96 /* The size of the segment to be written to the .class file. */
97 int size;
98 };
99
100 #define PENDING_CLEANUP_PC (-3)
101 #define PENDING_EXIT_PC (-2)
102 #define UNDEFINED_PC (-1)
103
104 /* Each "block" represents a label plus the bytecode instructions following.
105 There may be branches out of the block, but no incoming jumps, except
106 to the beginning of the block.
107
108 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
109 associated code yet), but it is an undefined label.
110 */
111
112 struct jcf_block
113 {
 114 /* For blocks that are defined, the next block (in pc order).
 115 For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
116 or a cleanup expression (from a TRY_FINALLY_EXPR),
117 this is the next (outer) such end label, in a stack headed by
118 labeled_blocks in jcf_partial. */
119 struct jcf_block *next;
120
 121 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR,
 122 pc is PENDING_EXIT_PC.
 123 In the not-yet-defined end label for a pending cleanup subroutine,
124 pc is PENDING_CLEANUP_PC.
125 For other not-yet-defined labels, pc is UNDEFINED_PC.
126
127 If the label has been defined:
128 Until perform_relocations is finished, this is the maximum possible
 129 value of the bytecode offset at the beginning of this block.
130 After perform_relocations, it is the actual offset (pc). */
131 int pc;
132
133 int linenumber;
134
135 /* After finish_jcf_block is called, the actual instructions
136 contained in this block. Before that NULL, and the instructions
137 are in state->bytecode. */
138 union {
139 struct chunk *chunk;
140
141 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
142 covered by the cleanup. */
143 struct jcf_block *start_label;
144 } v;
145
146 union {
147 /* Set of relocations (in reverse offset order) for this block. */
148 struct jcf_relocation *relocations;
149
150 /* If this block is that of the not-yet-defined end label of
151 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
152 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
153 tree labeled_block;
154 } u;
155 };
156
157 /* A "relocation" type for the 0-3 bytes of padding at the start
158 of a tableswitch or a lookupswitch. */
159 #define SWITCH_ALIGN_RELOC 4
160
161 /* A relocation type for the labels in a tableswitch or a lookupswitch;
162 these are relative to the start of the instruction, but (due to
 163 the 0-3 bytes of padding), we don't know the offset before relocation. */
164 #define BLOCK_START_RELOC 1
165
166 struct jcf_relocation
167 {
168 /* Next relocation for the current jcf_block. */
169 struct jcf_relocation *next;
170
171 /* The (byte) offset within the current block that needs to be relocated. */
172 HOST_WIDE_INT offset;
173
174 /* 0 if offset is a 4-byte relative offset.
175 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
176 for proper alignment in tableswitch/lookupswitch instructions.
177 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
178 to the start of the containing block.
179 -1 if offset is a 2-byte relative offset.
180 < -1 if offset is the address of an instruction with a 2-byte offset
181 that does not have a corresponding 4-byte offset version, in which
182 case the absolute value of kind is the inverted opcode.
183 > 4 if offset is the address of an instruction (such as jsr) with a
184 2-byte offset that does have a corresponding 4-byte offset version,
185 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
186 int kind;
187
188 /* The label the relocation wants to actually transfer to. */
189 struct jcf_block *label;
190 };
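
/* For example, emit_goto and emit_jsr (below) record kind OPCODE_goto_w
   and OPCODE_jsr_w respectively, while emit_if records the negated
   inverse opcode (a kind < -1). */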
191
192 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
193 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194
195 /* State for single catch clause. */
196
197 struct jcf_handler
198 {
199 struct jcf_handler *next;
200
201 struct jcf_block *start_label;
202 struct jcf_block *end_label;
203 struct jcf_block *handler_label;
204
205 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
206 tree type;
207 };
208
209 /* State for the current switch statement. */
210
211 struct jcf_switch_state
212 {
213 struct jcf_switch_state *prev;
214 struct jcf_block *default_label;
215
216 struct jcf_relocation *cases;
217 int num_cases;
218 HOST_WIDE_INT min_case, max_case;
219 };
220
221 /* This structure is used to contain the various pieces that will
222 become a .class file. */
223
224 struct jcf_partial
225 {
226 struct chunk *first;
227 struct chunk *chunk;
228 struct obstack *chunk_obstack;
229 tree current_method;
230
231 /* List of basic blocks for the current method. */
232 struct jcf_block *blocks;
233 struct jcf_block *last_block;
234
235 struct localvar_info *first_lvar;
236 struct localvar_info *last_lvar;
237 int lvar_count;
238
239 CPool cpool;
240
241 int linenumber_count;
242
 243 /* Until perform_relocations, this is an upper bound on the number
244 of bytes (so far) in the instructions for the current method. */
245 int code_length;
246
247 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
248 struct jcf_block *labeled_blocks;
249
250 /* The current stack size (stack pointer) in the current method. */
251 int code_SP;
252
253 /* The largest extent of stack size (stack pointer) in the current method. */
254 int code_SP_max;
255
256 /* Contains a mapping from local var slot number to localvar_info. */
257 struct buffer localvars;
258
259 /* The buffer allocated for bytecode for the current jcf_block. */
260 struct buffer bytecode;
261
262 /* Chain of exception handlers for the current method. */
263 struct jcf_handler *handlers;
264
265 /* Last element in handlers chain. */
266 struct jcf_handler *last_handler;
267
268 /* Number of exception handlers for the current method. */
269 int num_handlers;
270
271 /* Number of finalizers we are currently nested within. */
272 int num_finalizers;
273
274 /* If non-NULL, use this for the return value. */
275 tree return_value_decl;
276
277 /* Information about the current switch statement. */
278 struct jcf_switch_state *sw_state;
279 };
280
281 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
282 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
283 int, struct obstack *));
284 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
285 struct jcf_partial *));
286 static void append_chunk_copy PARAMS ((unsigned char *, int,
287 struct jcf_partial *));
288 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
289 static void finish_jcf_block PARAMS ((struct jcf_partial *));
290 static void define_jcf_label PARAMS ((struct jcf_block *,
291 struct jcf_partial *));
292 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
293 static void put_linenumber PARAMS ((int, struct jcf_partial *));
294 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
295 static void localvar_free PARAMS ((tree, struct jcf_partial *));
296 static int get_access_flags PARAMS ((tree));
297 static void write_chunks PARAMS ((FILE *, struct chunk *));
298 static int adjust_typed_op PARAMS ((tree, int));
299 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
300 struct jcf_block *, int,
301 struct jcf_partial *));
302 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
303 static void perform_relocations PARAMS ((struct jcf_partial *));
304 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
305 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
306 static void release_jcf_state PARAMS ((struct jcf_partial *));
307 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
308 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
309 struct jcf_block *,
310 struct jcf_partial *));
311 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
312 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
313 struct jcf_partial *));
314 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
315 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
316 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
317 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
318 struct jcf_partial *));
319 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
324 static void field_op PARAMS ((tree, int, struct jcf_partial *));
325 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
326 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
327 static void emit_pop PARAMS ((int, struct jcf_partial *));
328 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
329 static void emit_load PARAMS ((tree, struct jcf_partial *));
330 static void emit_store PARAMS ((tree, struct jcf_partial *));
331 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
332 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
335 static void emit_switch_reloc PARAMS ((struct jcf_block *,
336 struct jcf_partial *));
337 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
338 struct jcf_partial *));
339 static void emit_if PARAMS ((struct jcf_block *, int, int,
340 struct jcf_partial *));
341 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
342 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
343 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
344 static char *make_class_file_name PARAMS ((tree));
345 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
346 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
347 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
348 static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
349
350 /* Utility macros for appending (big-endian) data to a buffer.
351 We assume a local variable 'ptr' points into where we want to
352 write next, and we assume enough space has been allocated. */
353
354 #ifdef ENABLE_JC1_CHECKING
355 static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));
356
357 static int
358 CHECK_PUT (ptr, state, i)
359 void *ptr;
360 struct jcf_partial *state;
361 int i;
362 {
363 if ((unsigned char *) ptr < state->chunk->data
364 || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
365 abort ();
366
367 return 0;
368 }
369 #else
370 #define CHECK_PUT(PTR, STATE, I) ((void)0)
371 #endif
372
373 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
374 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
375 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
376 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
377
378 /* There are some cases below where CHECK_PUT is guaranteed to fail.
379 Use the following macros in those specific cases. */
380 #define UNSAFE_PUT1(X) (*ptr++ = (X))
381 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
382 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
383 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
384
385 \f
386 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
387 Set the data and size fields to DATA and SIZE, respectively.
388 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
389
390 static struct chunk *
391 alloc_chunk (last, data, size, work)
392 struct chunk *last;
393 unsigned char *data;
394 int size;
395 struct obstack *work;
396 {
397 struct chunk *chunk = (struct chunk *)
398 obstack_alloc (work, sizeof(struct chunk));
399
400 if (data == NULL && size > 0)
401 data = obstack_alloc (work, size);
402
403 chunk->next = NULL;
404 chunk->data = data;
405 chunk->size = size;
406 if (last != NULL)
407 last->next = chunk;
408 return chunk;
409 }
410
411 #ifdef ENABLE_JC1_CHECKING
412 static int CHECK_OP PARAMS ((struct jcf_partial *));
413
414 static int
415 CHECK_OP (state)
416 struct jcf_partial *state;
417 {
418 if (state->bytecode.ptr > state->bytecode.limit)
419 abort ();
420
421 return 0;
422 }
423 #else
424 #define CHECK_OP(STATE) ((void) 0)
425 #endif
426
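/* Append a new chunk of SIZE bytes to STATE's chunk list, using DATA as
   its buffer, or allocating a fresh buffer if DATA is NULL.
   Return a pointer to the chunk's data. */
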
427 static unsigned char *
428 append_chunk (data, size, state)
429 unsigned char *data;
430 int size;
431 struct jcf_partial *state;
432 {
433 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
434 if (state->first == NULL)
435 state->first = state->chunk;
436 return state->chunk->data;
437 }
438
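/* Like append_chunk, but allocate a fresh buffer and copy SIZE bytes
   from DATA into it. */
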
439 static void
440 append_chunk_copy (data, size, state)
441 unsigned char *data;
442 int size;
443 struct jcf_partial *state;
444 {
445 unsigned char *ptr = append_chunk (NULL, size, state);
446 memcpy (ptr, data, size);
447 }
448 \f
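/* Allocate a new, as yet undefined, label (jcf_block). */
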
449 static struct jcf_block *
450 gen_jcf_label (state)
451 struct jcf_partial *state;
452 {
453 struct jcf_block *block = (struct jcf_block *)
454 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
455 block->next = NULL;
456 block->linenumber = -1;
457 block->pc = UNDEFINED_PC;
458 return block;
459 }
460
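/* Finish the current block: move the bytecode accumulated in
   state->bytecode into a chunk attached to the block, and bump
   state->code_length by the maximum size this block's code can have
   once relocations are expanded. */
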
461 static void
462 finish_jcf_block (state)
463 struct jcf_partial *state;
464 {
465 struct jcf_block *block = state->last_block;
466 struct jcf_relocation *reloc;
467 int code_length = BUFFER_LENGTH (&state->bytecode);
468 int pc = state->code_length;
469 append_chunk_copy (state->bytecode.data, code_length, state);
470 BUFFER_RESET (&state->bytecode);
471 block->v.chunk = state->chunk;
472
473 /* Calculate code_length to the maximum value it can have. */
474 pc += block->v.chunk->size;
475 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
476 {
477 int kind = reloc->kind;
478 if (kind == SWITCH_ALIGN_RELOC)
479 pc += 3;
480 else if (kind > BLOCK_START_RELOC)
481 pc += 2; /* 2-byte offset may grow to 4-byte offset */
482 else if (kind < -1)
483 pc += 5; /* May need to add a goto_w. */
484 }
485 state->code_length = pc;
486 }
487
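/* Define LABEL to be at the current position, finishing the previous
   block (if any) and appending LABEL to the block list. */
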
488 static void
489 define_jcf_label (label, state)
490 struct jcf_block *label;
491 struct jcf_partial *state;
492 {
493 if (state->last_block != NULL)
494 finish_jcf_block (state);
495 label->pc = state->code_length;
496 if (state->blocks == NULL)
497 state->blocks = label;
498 else
499 state->last_block->next = label;
500 state->last_block = label;
501 label->next = NULL;
502 label->u.relocations = NULL;
503 }
504
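/* Return a label for the current position: re-use the last block if no
   bytecode has been emitted into it yet, otherwise define a new label
   here. */
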
505 static struct jcf_block *
506 get_jcf_label_here (state)
507 struct jcf_partial *state;
508 {
509 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
510 return state->last_block;
511 else
512 {
513 struct jcf_block *label = gen_jcf_label (state);
514 define_jcf_label (label, state);
515 return label;
516 }
517 }
518
519 /* Note a line number entry for the current PC and given LINE. */
520
521 static void
522 put_linenumber (line, state)
523 int line;
524 struct jcf_partial *state;
525 {
526 struct jcf_block *label = get_jcf_label_here (state);
527 if (label->linenumber > 0)
528 {
529 label = gen_jcf_label (state);
530 define_jcf_label (label, state);
531 }
532 label->linenumber = line;
533 state->linenumber_count++;
534 }
535
536 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
537 in the range (START_LABEL, END_LABEL). */
538
539 static struct jcf_handler *
540 alloc_handler (start_label, end_label, state)
541 struct jcf_block *start_label;
542 struct jcf_block *end_label;
543 struct jcf_partial *state;
544 {
545 struct jcf_handler *handler = (struct jcf_handler *)
546 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
547 handler->start_label = start_label;
548 handler->end_label = end_label;
549 handler->handler_label = get_jcf_label_here (state);
550 if (state->handlers == NULL)
551 state->handlers = handler;
552 else
553 state->last_handler->next = handler;
554 state->last_handler = handler;
555 handler->next = NULL;
556 state->num_handlers++;
557 return handler;
558 }
559
560 \f
 561 /* The index of the jvm local variable allocated for this DECL.
562 This is assigned when generating .class files;
563 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
 564 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
565
566 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
567
568 struct localvar_info
569 {
570 struct localvar_info *next;
571
572 tree decl;
573 struct jcf_block *start_label;
574 struct jcf_block *end_label;
575 };
576
577 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
578 #define localvar_max \
579 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
580
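/* Allocate a JVM local variable slot for DECL (two consecutive slots if
   its type is wide), record it in DECL_LOCAL_INDEX, and chain a
   localvar_info entry for debugging information when appropriate. */
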
581 static void
582 localvar_alloc (decl, state)
583 tree decl;
584 struct jcf_partial *state;
585 {
586 struct jcf_block *start_label = get_jcf_label_here (state);
587 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
588 int index;
589 register struct localvar_info *info;
590 register struct localvar_info **ptr = localvar_buffer;
591 register struct localvar_info **limit
592 = (struct localvar_info**) state->localvars.ptr;
593 for (index = 0; ptr < limit; index++, ptr++)
594 {
595 if (ptr[0] == NULL
596 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
597 break;
598 }
599 if (ptr == limit)
600 {
601 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
602 ptr = (struct localvar_info**) state->localvars.data + index;
603 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
604 }
605 info = (struct localvar_info *)
606 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
607 ptr[0] = info;
608 if (wide)
609 ptr[1] = (struct localvar_info *)(~0);
610 DECL_LOCAL_INDEX (decl) = index;
611 info->decl = decl;
612 info->start_label = start_label;
613
614 if (debug_info_level > DINFO_LEVEL_TERSE
615 && DECL_NAME (decl) != NULL_TREE)
616 {
617 /* Generate debugging info. */
618 info->next = NULL;
619 if (state->last_lvar != NULL)
620 state->last_lvar->next = info;
621 else
622 state->first_lvar = info;
623 state->last_lvar = info;
624 state->lvar_count++;
625 }
626 }
627
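/* Release the local variable slot(s) used by DECL, closing its
   localvar_info range at the current label. */
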
628 static void
629 localvar_free (decl, state)
630 tree decl;
631 struct jcf_partial *state;
632 {
633 struct jcf_block *end_label = get_jcf_label_here (state);
634 int index = DECL_LOCAL_INDEX (decl);
635 register struct localvar_info **ptr = &localvar_buffer [index];
636 register struct localvar_info *info = *ptr;
637 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
638
639 info->end_label = end_label;
640
641 if (info->decl != decl)
642 abort ();
643 ptr[0] = NULL;
644 if (wide)
645 {
646 if (ptr[1] != (struct localvar_info *)(~0))
647 abort ();
648 ptr[1] = NULL;
649 }
650 }
651
652 \f
653 #define STACK_TARGET 1
654 #define IGNORE_TARGET 2
655
656 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
657 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
658
659 static int
660 get_access_flags (decl)
661 tree decl;
662 {
663 int flags = 0;
664 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
665 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
666 flags |= ACC_PUBLIC;
667 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
668 flags |= ACC_FINAL;
669 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
670 {
671 if (TREE_PROTECTED (decl))
672 flags |= ACC_PROTECTED;
673 if (TREE_PRIVATE (decl))
674 flags |= ACC_PRIVATE;
675 }
676 else if (TREE_CODE (decl) == TYPE_DECL)
677 {
678 if (CLASS_SUPER (decl))
679 flags |= ACC_SUPER;
680 if (CLASS_ABSTRACT (decl))
681 flags |= ACC_ABSTRACT;
682 if (CLASS_INTERFACE (decl))
683 flags |= ACC_INTERFACE;
684 if (CLASS_STATIC (decl))
685 flags |= ACC_STATIC;
686 if (CLASS_PRIVATE (decl))
687 flags |= ACC_PRIVATE;
688 if (CLASS_PROTECTED (decl))
689 flags |= ACC_PROTECTED;
690 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
691 || LOCAL_CLASS_P (TREE_TYPE (decl)))
692 flags |= ACC_PRIVATE;
693 if (CLASS_STRICTFP (decl))
694 flags |= ACC_STRICT;
695 }
696 else
697 abort ();
698
699 if (TREE_CODE (decl) == FUNCTION_DECL)
700 {
701 if (METHOD_NATIVE (decl))
702 flags |= ACC_NATIVE;
703 if (METHOD_STATIC (decl))
704 flags |= ACC_STATIC;
705 if (METHOD_SYNCHRONIZED (decl))
706 flags |= ACC_SYNCHRONIZED;
707 if (METHOD_ABSTRACT (decl))
708 flags |= ACC_ABSTRACT;
709 if (METHOD_STRICTFP (decl))
710 flags |= ACC_STRICT;
711 }
712 if (isfield)
713 {
714 if (FIELD_STATIC (decl))
715 flags |= ACC_STATIC;
716 if (FIELD_VOLATILE (decl))
717 flags |= ACC_VOLATILE;
718 if (FIELD_TRANSIENT (decl))
719 flags |= ACC_TRANSIENT;
720 }
721 return flags;
722 }
723
724 /* Write the list of segments starting at CHUNKS to STREAM. */
725
726 static void
727 write_chunks (stream, chunks)
728 FILE* stream;
729 struct chunk *chunks;
730 {
731 for (; chunks != NULL; chunks = chunks->next)
732 fwrite (chunks->data, chunks->size, 1, stream);
733 }
734
735 /* Push a 1-word constant in the constant pool at the given INDEX.
736 (Caller is responsible for doing NOTE_PUSH.) */
737
738 static void
739 push_constant1 (index, state)
740 HOST_WIDE_INT index;
741 struct jcf_partial *state;
742 {
743 RESERVE (3);
744 if (index < 256)
745 {
746 OP1 (OPCODE_ldc);
747 OP1 (index);
748 }
749 else
750 {
751 OP1 (OPCODE_ldc_w);
752 OP2 (index);
753 }
754 }
755
756 /* Push a 2-word constant in the constant pool at the given INDEX.
757 (Caller is responsible for doing NOTE_PUSH.) */
758
759 static void
760 push_constant2 (index, state)
761 HOST_WIDE_INT index;
762 struct jcf_partial *state;
763 {
764 RESERVE (3);
765 OP1 (OPCODE_ldc2_w);
766 OP2 (index);
767 }
768
769 /* Push 32-bit integer constant on VM stack.
770 Caller is responsible for doing NOTE_PUSH. */
771
772 static void
773 push_int_const (i, state)
774 HOST_WIDE_INT i;
775 struct jcf_partial *state;
776 {
777 RESERVE(3);
778 if (i >= -1 && i <= 5)
779 OP1(OPCODE_iconst_0 + i);
780 else if (i >= -128 && i < 128)
781 {
782 OP1(OPCODE_bipush);
783 OP1(i);
784 }
785 else if (i >= -32768 && i < 32768)
786 {
787 OP1(OPCODE_sipush);
788 OP2(i);
789 }
790 else
791 {
792 i = find_constant1 (&state->cpool, CONSTANT_Integer,
793 (jword)(i & 0xFFFFFFFF));
794 push_constant1 (i, state);
795 }
796 }
797
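/* Find or allocate a CONSTANT_Long constant-pool entry for the 64-bit
   value whose low and high words are LO and HI; return its index. */
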
798 static int
799 find_constant_wide (lo, hi, state)
800 HOST_WIDE_INT lo, hi;
801 struct jcf_partial *state;
802 {
803 HOST_WIDE_INT w1, w2;
804 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
805 return find_constant2 (&state->cpool, CONSTANT_Long,
806 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
807 }
808
809 /* Find or allocate a constant pool entry for the given VALUE.
810 Return the index in the constant pool. */
811
812 static int
813 find_constant_index (value, state)
814 tree value;
815 struct jcf_partial *state;
816 {
817 if (TREE_CODE (value) == INTEGER_CST)
818 {
819 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
820 return find_constant1 (&state->cpool, CONSTANT_Integer,
821 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
822 else
823 return find_constant_wide (TREE_INT_CST_LOW (value),
824 TREE_INT_CST_HIGH (value), state);
825 }
826 else if (TREE_CODE (value) == REAL_CST)
827 {
828 long words[2];
829
830 real_to_target (words, &TREE_REAL_CST (value),
831 TYPE_MODE (TREE_TYPE (value)));
832 words[0] &= 0xffffffff;
833 words[1] &= 0xffffffff;
834
835 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
836 return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
837 else
838 return find_constant2 (&state->cpool, CONSTANT_Double,
839 (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
840 (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
841 }
842 else if (TREE_CODE (value) == STRING_CST)
843 return find_string_constant (&state->cpool, value);
844
845 else
846 abort ();
847 }
848
849 /* Push 64-bit long constant on VM stack.
850 Caller is responsible for doing NOTE_PUSH. */
851
852 static void
853 push_long_const (lo, hi, state)
854 HOST_WIDE_INT lo, hi;
855 struct jcf_partial *state;
856 {
857 HOST_WIDE_INT highpart, dummy;
858 jint lowpart = WORD_TO_INT (lo);
859
860 rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
861
862 if (highpart == 0 && (lowpart == 0 || lowpart == 1))
863 {
864 RESERVE(1);
865 OP1(OPCODE_lconst_0 + lowpart);
866 }
867 else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
868 || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
869 {
870 push_int_const (lowpart, state);
871 RESERVE (1);
872 OP1 (OPCODE_i2l);
873 }
874 else
875 push_constant2 (find_constant_wide (lo, hi, state), state);
876 }
877
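/* Emit OPCODE (a get/put field or static opcode) followed by a 2-byte
   fieldref constant-pool index for FIELD. */
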
878 static void
879 field_op (field, opcode, state)
880 tree field;
881 int opcode;
882 struct jcf_partial *state;
883 {
884 int index = find_fieldref_index (&state->cpool, field);
885 RESERVE (3);
886 OP1 (opcode);
887 OP2 (index);
888 }
889
 890 /* Returns an integer in the range 0 (for 'int') through 7 (for 'short'),
 891 with 4 meaning an object reference, which matches the pattern of how JVM
 892 opcodes typically depend on the operand type. */
893
894 static int
895 adjust_typed_op (type, max)
896 tree type;
897 int max;
898 {
899 switch (TREE_CODE (type))
900 {
901 case POINTER_TYPE:
902 case RECORD_TYPE: return 4;
903 case BOOLEAN_TYPE:
904 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
905 case CHAR_TYPE:
906 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
907 case INTEGER_TYPE:
908 switch (TYPE_PRECISION (type))
909 {
910 case 8: return max < 5 ? 0 : 5;
911 case 16: return max < 7 ? 0 : 7;
912 case 32: return 0;
913 case 64: return 1;
914 }
915 break;
916 case REAL_TYPE:
917 switch (TYPE_PRECISION (type))
918 {
919 case 32: return 2;
920 case 64: return 3;
921 }
922 break;
923 default:
924 break;
925 }
926 abort ();
927 }
928
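/* Emit OPCODE taking a local-variable INDEX, using the wide prefix if
   INDEX does not fit in a single byte. */
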
929 static void
930 maybe_wide (opcode, index, state)
931 int opcode, index;
932 struct jcf_partial *state;
933 {
934 if (index >= 256)
935 {
936 RESERVE (4);
937 OP1 (OPCODE_wide);
938 OP1 (opcode);
939 OP2 (index);
940 }
941 else
942 {
943 RESERVE (2);
944 OP1 (opcode);
945 OP1 (index);
946 }
947 }
948
949 /* Compile code to duplicate with offset, where
 950 SIZE is the size of the stack item to duplicate (1 or 2), and
951 OFFSET is where to insert the result (must be 0, 1, or 2).
952 (The new words get inserted at stack[SP-size-offset].) */
953
954 static void
955 emit_dup (size, offset, state)
956 int size, offset;
957 struct jcf_partial *state;
958 {
959 int kind;
960 if (size == 0)
961 return;
962 RESERVE(1);
963 if (offset == 0)
964 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
965 else if (offset == 1)
966 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
967 else if (offset == 2)
968 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
969 else
970 abort();
971 OP1 (kind);
972 NOTE_PUSH (size);
973 }
974
975 static void
976 emit_pop (size, state)
977 int size;
978 struct jcf_partial *state;
979 {
980 RESERVE (1);
981 OP1 (OPCODE_pop - 1 + size);
982 }
983
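/* Emit an iinc instruction adding VALUE to the local variable VAR,
   using the wide form when the slot or the increment does not fit in
   one byte. */
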
984 static void
985 emit_iinc (var, value, state)
986 tree var;
987 HOST_WIDE_INT value;
988 struct jcf_partial *state;
989 {
990 int slot = DECL_LOCAL_INDEX (var);
991
992 if (value < -128 || value > 127 || slot >= 256)
993 {
994 RESERVE (6);
995 OP1 (OPCODE_wide);
996 OP1 (OPCODE_iinc);
997 OP2 (slot);
998 OP2 (value);
999 }
1000 else
1001 {
1002 RESERVE (3);
1003 OP1 (OPCODE_iinc);
1004 OP1 (slot);
1005 OP1 (value);
1006 }
1007 }
1008
1009 static void
1010 emit_load_or_store (var, opcode, state)
1011 tree var; /* Variable to load from or store into. */
1012 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
1013 struct jcf_partial *state;
1014 {
1015 tree type = TREE_TYPE (var);
1016 int kind = adjust_typed_op (type, 4);
1017 int index = DECL_LOCAL_INDEX (var);
1018 if (index <= 3)
1019 {
1020 RESERVE (1);
1021 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1022 }
1023 else
1024 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
1025 }
1026
1027 static void
1028 emit_load (var, state)
1029 tree var;
1030 struct jcf_partial *state;
1031 {
1032 emit_load_or_store (var, OPCODE_iload, state);
1033 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1034 }
1035
1036 static void
1037 emit_store (var, state)
1038 tree var;
1039 struct jcf_partial *state;
1040 {
1041 emit_load_or_store (var, OPCODE_istore, state);
1042 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1043 }
1044
1045 static void
1046 emit_unop (opcode, type, state)
1047 enum java_opcode opcode;
1048 tree type ATTRIBUTE_UNUSED;
1049 struct jcf_partial *state;
1050 {
1051 RESERVE(1);
1052 OP1 (opcode);
1053 }
1054
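/* Emit the one-byte instruction OPCODE for a binary operation on
   operands of TYPE, noting the net stack effect (one operand of TYPE
   is consumed). */
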
1055 static void
1056 emit_binop (opcode, type, state)
1057 enum java_opcode opcode;
1058 tree type;
1059 struct jcf_partial *state;
1060 {
1061 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1062 RESERVE(1);
1063 OP1 (opcode);
1064 NOTE_POP (size);
1065 }
1066
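/* Record a relocation of the given KIND against TARGET at the current
   offset in the current block, and emit VALUE as its 2- or 4-byte
   placeholder (SWITCH_ALIGN_RELOC emits no bytes here). */
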
1067 static void
1068 emit_reloc (value, kind, target, state)
1069 HOST_WIDE_INT value;
1070 int kind;
1071 struct jcf_block *target;
1072 struct jcf_partial *state;
1073 {
1074 struct jcf_relocation *reloc = (struct jcf_relocation *)
1075 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1076 struct jcf_block *block = state->last_block;
1077 reloc->next = block->u.relocations;
1078 block->u.relocations = reloc;
1079 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1080 reloc->label = target;
1081 reloc->kind = kind;
1082 if (kind == 0 || kind == BLOCK_START_RELOC)
1083 OP4 (value);
1084 else if (kind != SWITCH_ALIGN_RELOC)
1085 OP2 (value);
1086 }
1087
1088 static void
1089 emit_switch_reloc (label, state)
1090 struct jcf_block *label;
1091 struct jcf_partial *state;
1092 {
1093 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1094 }
1095
1096 /* Similar to emit_switch_reloc,
1097 but re-uses an existing case reloc. */
1098
1099 static void
1100 emit_case_reloc (reloc, state)
1101 struct jcf_relocation *reloc;
1102 struct jcf_partial *state;
1103 {
1104 struct jcf_block *block = state->last_block;
1105 reloc->next = block->u.relocations;
1106 block->u.relocations = reloc;
1107 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1108 reloc->kind = BLOCK_START_RELOC;
1109 OP4 (0);
1110 }
1111
1112 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1113 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1114
1115 static void
1116 emit_if (target, opcode, inv_opcode, state)
1117 struct jcf_block *target;
1118 int opcode, inv_opcode;
1119 struct jcf_partial *state;
1120 {
1121 RESERVE(3);
1122 OP1 (opcode);
 1123 /* Value is 1 byte from reloc back to start of instruction. */
1124 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1125 }
1126
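/* Emit a goto to TARGET; the recorded relocation lets it be widened to
   goto_w if the final offset does not fit in 2 bytes. */
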
1127 static void
1128 emit_goto (target, state)
1129 struct jcf_block *target;
1130 struct jcf_partial *state;
1131 {
1132 RESERVE(3);
1133 OP1 (OPCODE_goto);
1134 /* Value is 1 byte from reloc back to start of instruction. */
1135 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1136 }
1137
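/* Emit a jsr to TARGET, recording a relocation so it can be widened to
   jsr_w if needed. */
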
1138 static void
1139 emit_jsr (target, state)
1140 struct jcf_block *target;
1141 struct jcf_partial *state;
1142 {
1143 RESERVE(3);
1144 OP1 (OPCODE_jsr);
1145 /* Value is 1 byte from reloc back to start of instruction. */
1146 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1147 }
1148
1149 /* Generate code to evaluate EXP. If the result is true,
1150 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
 1151 TRUE_BRANCH_FIRST is a code generation hint that the
1152 TRUE_LABEL may follow right after this. (The idea is that we
1153 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1154
1155 static void
1156 generate_bytecode_conditional (exp, true_label, false_label,
1157 true_branch_first, state)
1158 tree exp;
1159 struct jcf_block *true_label;
1160 struct jcf_block *false_label;
1161 int true_branch_first;
1162 struct jcf_partial *state;
1163 {
1164 tree exp0, exp1, type;
1165 int save_SP = state->code_SP;
1166 enum java_opcode op, negop;
1167 switch (TREE_CODE (exp))
1168 {
1169 case INTEGER_CST:
1170 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1171 break;
1172 case COND_EXPR:
1173 {
1174 struct jcf_block *then_label = gen_jcf_label (state);
1175 struct jcf_block *else_label = gen_jcf_label (state);
1176 int save_SP_before, save_SP_after;
1177 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1178 then_label, else_label, 1, state);
1179 define_jcf_label (then_label, state);
1180 save_SP_before = state->code_SP;
1181 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1182 true_label, false_label, 1, state);
1183 save_SP_after = state->code_SP;
1184 state->code_SP = save_SP_before;
1185 define_jcf_label (else_label, state);
1186 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1187 true_label, false_label,
1188 true_branch_first, state);
1189 if (state->code_SP != save_SP_after)
1190 abort ();
1191 }
1192 break;
1193 case TRUTH_NOT_EXPR:
1194 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1195 true_label, ! true_branch_first, state);
1196 break;
1197 case TRUTH_ANDIF_EXPR:
1198 {
1199 struct jcf_block *next_label = gen_jcf_label (state);
1200 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1201 next_label, false_label, 1, state);
1202 define_jcf_label (next_label, state);
1203 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1204 true_label, false_label, 1, state);
1205 }
1206 break;
1207 case TRUTH_ORIF_EXPR:
1208 {
1209 struct jcf_block *next_label = gen_jcf_label (state);
1210 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1211 true_label, next_label, 1, state);
1212 define_jcf_label (next_label, state);
1213 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1214 true_label, false_label, 1, state);
1215 }
1216 break;
1217 compare_1:
1218 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1219 set it to the corresponding 1-operand if<COND> instructions. */
1220 op = op - 6;
1221 /* FALLTHROUGH */
1222 compare_2:
1223 /* The opcodes with their inverses are allocated in pairs.
1224 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1225 negop = (op & 1) ? op + 1 : op - 1;
1226 compare_2_ptr:
1227 if (true_branch_first)
1228 {
1229 emit_if (false_label, negop, op, state);
1230 emit_goto (true_label, state);
1231 }
1232 else
1233 {
1234 emit_if (true_label, op, negop, state);
1235 emit_goto (false_label, state);
1236 }
1237 break;
1238 case EQ_EXPR:
1239 op = OPCODE_if_icmpeq;
1240 goto compare;
1241 case NE_EXPR:
1242 op = OPCODE_if_icmpne;
1243 goto compare;
1244 case GT_EXPR:
1245 op = OPCODE_if_icmpgt;
1246 goto compare;
1247 case LT_EXPR:
1248 op = OPCODE_if_icmplt;
1249 goto compare;
1250 case GE_EXPR:
1251 op = OPCODE_if_icmpge;
1252 goto compare;
1253 case LE_EXPR:
1254 op = OPCODE_if_icmple;
1255 goto compare;
1256 compare:
1257 exp0 = TREE_OPERAND (exp, 0);
1258 exp1 = TREE_OPERAND (exp, 1);
1259 type = TREE_TYPE (exp0);
1260 switch (TREE_CODE (type))
1261 {
1262 int opf;
1263 case POINTER_TYPE: case RECORD_TYPE:
1264 switch (TREE_CODE (exp))
1265 {
1266 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1267 case NE_EXPR: op = OPCODE_if_acmpne; break;
1268 default: abort();
1269 }
1270 if (integer_zerop (exp1) || integer_zerop (exp0))
1271 {
1272 generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1273 STACK_TARGET, state);
1274 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1275 negop = (op & 1) ? op - 1 : op + 1;
1276 NOTE_POP (1);
1277 goto compare_2_ptr;
1278 }
1279 generate_bytecode_insns (exp0, STACK_TARGET, state);
1280 generate_bytecode_insns (exp1, STACK_TARGET, state);
1281 NOTE_POP (2);
1282 goto compare_2;
1283 case REAL_TYPE:
1284 generate_bytecode_insns (exp0, STACK_TARGET, state);
1285 generate_bytecode_insns (exp1, STACK_TARGET, state);
1286 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1287 opf = OPCODE_fcmpg;
1288 else
1289 opf = OPCODE_fcmpl;
1290 if (TYPE_PRECISION (type) > 32)
1291 {
1292 opf += 2;
1293 NOTE_POP (4);
1294 }
1295 else
1296 NOTE_POP (2);
1297 RESERVE (1);
1298 OP1 (opf);
1299 goto compare_1;
1300 case INTEGER_TYPE:
1301 if (TYPE_PRECISION (type) > 32)
1302 {
1303 generate_bytecode_insns (exp0, STACK_TARGET, state);
1304 generate_bytecode_insns (exp1, STACK_TARGET, state);
1305 NOTE_POP (4);
1306 RESERVE (1);
1307 OP1 (OPCODE_lcmp);
1308 goto compare_1;
1309 }
 1310 /* FALLTHROUGH */
1311 default:
1312 if (integer_zerop (exp1))
1313 {
1314 generate_bytecode_insns (exp0, STACK_TARGET, state);
1315 NOTE_POP (1);
1316 goto compare_1;
1317 }
1318 if (integer_zerop (exp0))
1319 {
1320 switch (op)
1321 {
1322 case OPCODE_if_icmplt:
1323 case OPCODE_if_icmpge:
1324 op += 2;
1325 break;
1326 case OPCODE_if_icmpgt:
1327 case OPCODE_if_icmple:
1328 op -= 2;
1329 break;
1330 default:
1331 break;
1332 }
1333 generate_bytecode_insns (exp1, STACK_TARGET, state);
1334 NOTE_POP (1);
1335 goto compare_1;
1336 }
1337 generate_bytecode_insns (exp0, STACK_TARGET, state);
1338 generate_bytecode_insns (exp1, STACK_TARGET, state);
1339 NOTE_POP (2);
1340 goto compare_2;
1341 }
1342
1343 default:
1344 generate_bytecode_insns (exp, STACK_TARGET, state);
1345 NOTE_POP (1);
1346 if (true_branch_first)
1347 {
1348 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1349 emit_goto (true_label, state);
1350 }
1351 else
1352 {
1353 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1354 emit_goto (false_label, state);
1355 }
1356 break;
1357 }
1358 if (save_SP != state->code_SP)
1359 abort ();
1360 }
1361
 1362 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
 1363 but only as far out as LIMIT (since we are about to jump to the
 1364 label that is LIMIT). */
1365
1366 static void
1367 call_cleanups (limit, state)
1368 struct jcf_block *limit;
1369 struct jcf_partial *state;
1370 {
1371 struct jcf_block *block = state->labeled_blocks;
1372 for (; block != limit; block = block->next)
1373 {
1374 if (block->pc == PENDING_CLEANUP_PC)
1375 emit_jsr (block, state);
1376 }
1377 }
1378
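/* Generate code to return EXP (or a void return) from the current
   method, calling any pending cleanup subroutines first; inside a
   finalizer, the return value is saved in a temporary local across the
   cleanups. */
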
1379 static void
1380 generate_bytecode_return (exp, state)
1381 tree exp;
1382 struct jcf_partial *state;
1383 {
1384 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1385 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1386 int op;
1387 again:
1388 if (exp != NULL)
1389 {
1390 switch (TREE_CODE (exp))
1391 {
1392 case COMPOUND_EXPR:
1393 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1394 state);
1395 exp = TREE_OPERAND (exp, 1);
1396 goto again;
1397 case COND_EXPR:
1398 {
1399 struct jcf_block *then_label = gen_jcf_label (state);
1400 struct jcf_block *else_label = gen_jcf_label (state);
1401 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1402 then_label, else_label, 1, state);
1403 define_jcf_label (then_label, state);
1404 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1405 define_jcf_label (else_label, state);
1406 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1407 }
1408 return;
1409 default:
1410 generate_bytecode_insns (exp,
1411 returns_void ? IGNORE_TARGET
1412 : STACK_TARGET, state);
1413 }
1414 }
1415 if (returns_void)
1416 {
1417 op = OPCODE_return;
1418 call_cleanups (NULL, state);
1419 }
1420 else
1421 {
1422 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1423 if (state->num_finalizers > 0)
1424 {
1425 if (state->return_value_decl == NULL_TREE)
1426 {
1427 state->return_value_decl
1428 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1429 localvar_alloc (state->return_value_decl, state);
1430 }
1431 emit_store (state->return_value_decl, state);
1432 call_cleanups (NULL, state);
1433 emit_load (state->return_value_decl, state);
1434 /* If we call localvar_free (state->return_value_decl, state),
 1435 then we risk the saved decl being erroneously re-used in the
1436 finalizer. Instead, we keep the state->return_value_decl
1437 allocated through the rest of the method. This is not
1438 the greatest solution, but it is at least simple and safe. */
1439 }
1440 }
1441 RESERVE (1);
1442 OP1 (op);
1443 }
1444
1445 /* Generate bytecode for sub-expression EXP of METHOD.
1446 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1447
1448 static void
1449 generate_bytecode_insns (exp, target, state)
1450 tree exp;
1451 int target;
1452 struct jcf_partial *state;
1453 {
1454 tree type, arg;
1455 enum java_opcode jopcode;
1456 int op;
1457 HOST_WIDE_INT value;
1458 int post_op;
1459 int size;
1460 int offset;
1461
1462 if (exp == NULL && target == IGNORE_TARGET)
1463 return;
1464
1465 type = TREE_TYPE (exp);
1466
1467 switch (TREE_CODE (exp))
1468 {
1469 case BLOCK:
1470 if (BLOCK_EXPR_BODY (exp))
1471 {
1472 tree local;
1473 tree body = BLOCK_EXPR_BODY (exp);
1474 for (local = BLOCK_EXPR_DECLS (exp); local; )
1475 {
1476 tree next = TREE_CHAIN (local);
1477 localvar_alloc (local, state);
1478 local = next;
1479 }
1480 /* Avoid deep recursion for long blocks. */
1481 while (TREE_CODE (body) == COMPOUND_EXPR)
1482 {
1483 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1484 body = TREE_OPERAND (body, 1);
1485 }
1486 generate_bytecode_insns (body, target, state);
1487 for (local = BLOCK_EXPR_DECLS (exp); local; )
1488 {
1489 tree next = TREE_CHAIN (local);
1490 localvar_free (local, state);
1491 local = next;
1492 }
1493 }
1494 break;
1495 case COMPOUND_EXPR:
1496 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1497 /* Normally the first operand to a COMPOUND_EXPR must complete
1498 normally. However, in the special case of a do-while
1499 statement this is not necessarily the case. */
1500 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1501 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1502 break;
1503 case EXPR_WITH_FILE_LOCATION:
1504 {
1505 const char *saved_input_filename = input_filename;
1506 tree body = EXPR_WFL_NODE (exp);
1507 int saved_lineno = lineno;
1508 if (body == empty_stmt_node)
1509 break;
1510 input_filename = EXPR_WFL_FILENAME (exp);
1511 lineno = EXPR_WFL_LINENO (exp);
1512 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1513 && debug_info_level > DINFO_LEVEL_NONE)
1514 put_linenumber (lineno, state);
1515 generate_bytecode_insns (body, target, state);
1516 input_filename = saved_input_filename;
1517 lineno = saved_lineno;
1518 }
1519 break;
1520 case INTEGER_CST:
1521 if (target == IGNORE_TARGET) ; /* do nothing */
1522 else if (TREE_CODE (type) == POINTER_TYPE)
1523 {
1524 if (! integer_zerop (exp))
1525 abort();
1526 RESERVE(1);
1527 OP1 (OPCODE_aconst_null);
1528 NOTE_PUSH (1);
1529 }
1530 else if (TYPE_PRECISION (type) <= 32)
1531 {
1532 push_int_const (TREE_INT_CST_LOW (exp), state);
1533 NOTE_PUSH (1);
1534 }
1535 else
1536 {
1537 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1538 state);
1539 NOTE_PUSH (2);
1540 }
1541 break;
1542 case REAL_CST:
1543 {
1544 int prec = TYPE_PRECISION (type) >> 5;
1545 RESERVE(1);
1546 if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1547 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1548 else if (real_onep (exp))
1549 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1550 /* FIXME Should also use fconst_2 for 2.0f.
1551 Also, should use iconst_2/ldc followed by i2f/i2d
1552 for other float/double when the value is a small integer. */
1553 else
1554 {
1555 offset = find_constant_index (exp, state);
1556 if (prec == 1)
1557 push_constant1 (offset, state);
1558 else
1559 push_constant2 (offset, state);
1560 }
1561 NOTE_PUSH (prec);
1562 }
1563 break;
1564 case STRING_CST:
1565 push_constant1 (find_string_constant (&state->cpool, exp), state);
1566 NOTE_PUSH (1);
1567 break;
1568 case VAR_DECL:
1569 if (TREE_STATIC (exp))
1570 {
1571 field_op (exp, OPCODE_getstatic, state);
1572 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1573 break;
1574 }
1575 /* ... fall through ... */
1576 case PARM_DECL:
1577 emit_load (exp, state);
1578 break;
1579 case NON_LVALUE_EXPR:
1580 case INDIRECT_REF:
1581 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1582 break;
1583 case ARRAY_REF:
1584 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1585 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1586 if (target != IGNORE_TARGET)
1587 {
1588 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1589 RESERVE(1);
1590 OP1 (jopcode);
1591 if (! TYPE_IS_WIDE (type))
1592 NOTE_POP (1);
1593 }
1594 break;
1595 case COMPONENT_REF:
1596 {
1597 tree obj = TREE_OPERAND (exp, 0);
1598 tree field = TREE_OPERAND (exp, 1);
1599 int is_static = FIELD_STATIC (field);
1600 generate_bytecode_insns (obj,
1601 is_static ? IGNORE_TARGET : target, state);
1602 if (target != IGNORE_TARGET)
1603 {
1604 if (DECL_NAME (field) == length_identifier_node && !is_static
1605 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1606 {
1607 RESERVE (1);
1608 OP1 (OPCODE_arraylength);
1609 }
1610 else
1611 {
1612 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1613 state);
1614 if (! is_static)
1615 NOTE_POP (1);
1616 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1617 }
1618 }
1619 }
1620 break;
1621 case TRUTH_ANDIF_EXPR:
1622 case TRUTH_ORIF_EXPR:
1623 case EQ_EXPR:
1624 case NE_EXPR:
1625 case GT_EXPR:
1626 case LT_EXPR:
1627 case GE_EXPR:
1628 case LE_EXPR:
1629 {
1630 struct jcf_block *then_label = gen_jcf_label (state);
1631 struct jcf_block *else_label = gen_jcf_label (state);
1632 struct jcf_block *end_label = gen_jcf_label (state);
1633 generate_bytecode_conditional (exp,
1634 then_label, else_label, 1, state);
1635 define_jcf_label (then_label, state);
1636 push_int_const (1, state);
1637 emit_goto (end_label, state);
1638 define_jcf_label (else_label, state);
1639 push_int_const (0, state);
1640 define_jcf_label (end_label, state);
1641 NOTE_PUSH (1);
1642 }
1643 break;
1644 case COND_EXPR:
1645 {
1646 struct jcf_block *then_label = gen_jcf_label (state);
1647 struct jcf_block *else_label = gen_jcf_label (state);
1648 struct jcf_block *end_label = gen_jcf_label (state);
1649 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1650 then_label, else_label, 1, state);
1651 define_jcf_label (then_label, state);
1652 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1653 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1654 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1655 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1656 emit_goto (end_label, state);
1657 define_jcf_label (else_label, state);
1658 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1659 define_jcf_label (end_label, state);
1660 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1661 if (TREE_TYPE (exp) != void_type_node)
1662 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1663 }
1664 break;
1665 case CASE_EXPR:
1666 {
1667 struct jcf_switch_state *sw_state = state->sw_state;
1668 struct jcf_relocation *reloc = (struct jcf_relocation *)
1669 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1670 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1671 reloc->kind = 0;
1672 reloc->label = get_jcf_label_here (state);
1673 reloc->offset = case_value;
1674 reloc->next = sw_state->cases;
1675 sw_state->cases = reloc;
1676 if (sw_state->num_cases == 0)
1677 {
1678 sw_state->min_case = case_value;
1679 sw_state->max_case = case_value;
1680 }
1681 else
1682 {
1683 if (case_value < sw_state->min_case)
1684 sw_state->min_case = case_value;
1685 if (case_value > sw_state->max_case)
1686 sw_state->max_case = case_value;
1687 }
1688 sw_state->num_cases++;
1689 }
1690 break;
1691 case DEFAULT_EXPR:
1692 state->sw_state->default_label = get_jcf_label_here (state);
1693 break;
1694
1695 case SWITCH_EXPR:
1696 {
1697 /* The SWITCH_EXPR has three parts, generated in the following order:
1698 1. the switch_expression (the value used to select the correct case);
1699 2. the switch_body;
 1700 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1701 After code generation, we will re-order them in the order 1, 3, 2.
1702 This is to avoid any extra GOTOs. */
1703 struct jcf_switch_state sw_state;
1704 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1705 struct jcf_block *body_last; /* Last block of the switch_body. */
1706 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1707 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1708 struct jcf_block *body_block;
1709 int switch_length;
1710 sw_state.prev = state->sw_state;
1711 state->sw_state = &sw_state;
1712 sw_state.cases = NULL;
1713 sw_state.num_cases = 0;
1714 sw_state.default_label = NULL;
1715 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1716 expression_last = state->last_block;
1717 /* Force a new block here. */
1718 body_block = gen_jcf_label (state);
1719 define_jcf_label (body_block, state);
1720 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1721 body_last = state->last_block;
1722
1723 switch_instruction = gen_jcf_label (state);
1724 define_jcf_label (switch_instruction, state);
1725 if (sw_state.default_label == NULL)
1726 sw_state.default_label = gen_jcf_label (state);
1727
1728 if (sw_state.num_cases <= 1)
1729 {
1730 if (sw_state.num_cases == 0)
1731 {
1732 emit_pop (1, state);
1733 NOTE_POP (1);
1734 }
1735 else
1736 {
1737 push_int_const (sw_state.cases->offset, state);
1738 NOTE_PUSH (1);
1739 emit_if (sw_state.cases->label,
1740 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1741 }
1742 emit_goto (sw_state.default_label, state);
1743 }
1744 else
1745 {
1746 HOST_WIDE_INT i;
1747 /* Copy the chain of relocs into a sorted array. */
1748 struct jcf_relocation **relocs = (struct jcf_relocation **)
1749 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
 1750 The relocs array is a buffer with a gap.
1751 The assumption is that cases will normally come in "runs". */
1752 int gap_start = 0;
1753 int gap_end = sw_state.num_cases;
1754 struct jcf_relocation *reloc;
1755 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1756 {
1757 HOST_WIDE_INT case_value = reloc->offset;
1758 while (gap_end < sw_state.num_cases)
1759 {
1760 struct jcf_relocation *end = relocs[gap_end];
1761 if (case_value <= end->offset)
1762 break;
1763 relocs[gap_start++] = end;
1764 gap_end++;
1765 }
1766 while (gap_start > 0)
1767 {
1768 struct jcf_relocation *before = relocs[gap_start-1];
1769 if (case_value >= before->offset)
1770 break;
1771 relocs[--gap_end] = before;
1772 gap_start--;
1773 }
1774 relocs[gap_start++] = reloc;
1775 /* Note we don't check for duplicates. This is
1776 handled by the parser. */
1777 }
1778
1779 if (2 * sw_state.num_cases
1780 >= sw_state.max_case - sw_state.min_case)
1781 { /* Use tableswitch. */
1782 int index = 0;
1783 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1784 OP1 (OPCODE_tableswitch);
1785 emit_reloc (RELOCATION_VALUE_0,
1786 SWITCH_ALIGN_RELOC, NULL, state);
1787 emit_switch_reloc (sw_state.default_label, state);
1788 OP4 (sw_state.min_case);
1789 OP4 (sw_state.max_case);
1790 for (i = sw_state.min_case; ; )
1791 {
1792 reloc = relocs[index];
1793 if (i == reloc->offset)
1794 {
1795 emit_case_reloc (reloc, state);
1796 if (i == sw_state.max_case)
1797 break;
1798 index++;
1799 }
1800 else
1801 emit_switch_reloc (sw_state.default_label, state);
1802 i++;
1803 }
1804 }
1805 else
1806 { /* Use lookupswitch. */
1807 RESERVE(9 + 8 * sw_state.num_cases);
1808 OP1 (OPCODE_lookupswitch);
1809 emit_reloc (RELOCATION_VALUE_0,
1810 SWITCH_ALIGN_RELOC, NULL, state);
1811 emit_switch_reloc (sw_state.default_label, state);
1812 OP4 (sw_state.num_cases);
1813 for (i = 0; i < sw_state.num_cases; i++)
1814 {
1815 struct jcf_relocation *reloc = relocs[i];
1816 OP4 (reloc->offset);
1817 emit_case_reloc (reloc, state);
1818 }
1819 }
1820 free (relocs);
1821 }
1822
1823 instruction_last = state->last_block;
1824 if (sw_state.default_label->pc < 0)
1825 define_jcf_label (sw_state.default_label, state);
1826 else /* Force a new block. */
1827 sw_state.default_label = get_jcf_label_here (state);
1828 /* Now re-arrange the blocks so the switch_instruction
1829 comes before the switch_body. */
1830 switch_length = state->code_length - switch_instruction->pc;
1831 switch_instruction->pc = body_block->pc;
1832 instruction_last->next = body_block;
1833 instruction_last->v.chunk->next = body_block->v.chunk;
1834 expression_last->next = switch_instruction;
1835 expression_last->v.chunk->next = switch_instruction->v.chunk;
1836 body_last->next = sw_state.default_label;
1837 body_last->v.chunk->next = NULL;
1838 state->chunk = body_last->v.chunk;
1839 for (; body_block != sw_state.default_label; body_block = body_block->next)
1840 body_block->pc += switch_length;
1841
1842 state->sw_state = sw_state.prev;
1843 break;
1844 }
1845
1846 case RETURN_EXPR:
1847 exp = TREE_OPERAND (exp, 0);
1848 if (exp == NULL_TREE)
1849 exp = empty_stmt_node;
1850 else if (TREE_CODE (exp) != MODIFY_EXPR)
1851 abort ();
1852 else
1853 exp = TREE_OPERAND (exp, 1);
1854 generate_bytecode_return (exp, state);
1855 break;
1856 case LABELED_BLOCK_EXPR:
1857 {
1858 struct jcf_block *end_label = gen_jcf_label (state);
1859 end_label->next = state->labeled_blocks;
1860 state->labeled_blocks = end_label;
1861 end_label->pc = PENDING_EXIT_PC;
1862 end_label->u.labeled_block = exp;
1863 if (LABELED_BLOCK_BODY (exp))
1864 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1865 if (state->labeled_blocks != end_label)
1866 abort();
1867 state->labeled_blocks = end_label->next;
1868 define_jcf_label (end_label, state);
1869 }
1870 break;
1871 case LOOP_EXPR:
1872 {
1873 tree body = TREE_OPERAND (exp, 0);
1874 #if 0
1875 if (TREE_CODE (body) == COMPOUND_EXPR
1876 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1877 {
1878 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1879 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1880 struct jcf_block *head_label;
1881 struct jcf_block *body_label;
1882 struct jcf_block *end_label = gen_jcf_label (state);
1883 struct jcf_block *exit_label = state->labeled_blocks;
1884 head_label = gen_jcf_label (state);
1885 emit_goto (head_label, state);
1886 body_label = get_jcf_label_here (state);
1887 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1888 define_jcf_label (head_label, state);
1889 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1890 end_label, body_label, 1, state);
1891 define_jcf_label (end_label, state);
1892 }
1893 else
1894 #endif
1895 {
1896 struct jcf_block *head_label = get_jcf_label_here (state);
1897 generate_bytecode_insns (body, IGNORE_TARGET, state);
1898 if (CAN_COMPLETE_NORMALLY (body))
1899 emit_goto (head_label, state);
1900 }
1901 }
1902 break;
1903 case EXIT_EXPR:
1904 {
1905 struct jcf_block *label = state->labeled_blocks;
1906 struct jcf_block *end_label = gen_jcf_label (state);
1907 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1908 label, end_label, 0, state);
1909 define_jcf_label (end_label, state);
1910 }
1911 break;
1912 case EXIT_BLOCK_EXPR:
1913 {
1914 struct jcf_block *label = state->labeled_blocks;
1915 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1916 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1917 label = label->next;
1918 call_cleanups (label, state);
1919 emit_goto (label, state);
1920 }
1921 break;
1922
1923 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1924 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1925 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1926 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1927 increment:
1928
1929 arg = TREE_OPERAND (exp, 1);
1930 exp = TREE_OPERAND (exp, 0);
1931 type = TREE_TYPE (exp);
1932 size = TYPE_IS_WIDE (type) ? 2 : 1;
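      /* A non-static local or parameter of 32-bit integer type can be
         updated in place with a single iinc: e.g. `i++' on a local int
         becomes `iinc <slot>, 1', plus an iload only when the value of
         the expression is actually needed.  */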
1933 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1934 && ! TREE_STATIC (exp)
1935 && TREE_CODE (type) == INTEGER_TYPE
1936 && TYPE_PRECISION (type) == 32)
1937 {
1938 if (target != IGNORE_TARGET && post_op)
1939 emit_load (exp, state);
1940 emit_iinc (exp, value, state);
1941 if (target != IGNORE_TARGET && ! post_op)
1942 emit_load (exp, state);
1943 break;
1944 }
1945 if (TREE_CODE (exp) == COMPONENT_REF)
1946 {
1947 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1948 emit_dup (1, 0, state);
1949 /* Stack: ..., objectref, objectref. */
1950 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1951 NOTE_PUSH (size-1);
1952 /* Stack: ..., objectref, oldvalue. */
1953 offset = 1;
1954 }
1955 else if (TREE_CODE (exp) == ARRAY_REF)
1956 {
1957 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1958 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1959 emit_dup (2, 0, state);
1960 /* Stack: ..., array, index, array, index. */
1961 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1962 RESERVE(1);
1963 OP1 (jopcode);
1964 NOTE_POP (2-size);
1965 /* Stack: ..., array, index, oldvalue. */
1966 offset = 2;
1967 }
1968 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1969 {
1970 generate_bytecode_insns (exp, STACK_TARGET, state);
1971 /* Stack: ..., oldvalue. */
1972 offset = 0;
1973 }
1974 else
1975 abort ();
1976
1977 if (target != IGNORE_TARGET && post_op)
1978 emit_dup (size, offset, state);
1979 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1980 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1981 /* Stack, otherwise: ..., [result, ] oldvalue. */
1982 generate_bytecode_insns (arg, STACK_TARGET, state);
1983 emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1984 + adjust_typed_op (type, 3),
1985 type, state);
1986 if (target != IGNORE_TARGET && ! post_op)
1987 emit_dup (size, offset, state);
1988 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1989 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1990 /* Stack, otherwise: ..., [result, ] newvalue. */
1991 goto finish_assignment;
1992
1993 case MODIFY_EXPR:
1994 {
1995 tree lhs = TREE_OPERAND (exp, 0);
1996 tree rhs = TREE_OPERAND (exp, 1);
1997 int offset = 0;
1998
1999 /* See if we can use the iinc instruction. */
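        /* iinc takes a signed 16-bit increment (presumably via the wide
           form when needed, handled by emit_iinc).  For `x -= c' the
           emitted increment is -c, so the accepted range for c is shifted
           by one below: e.g. `x -= 32768' still qualifies (increment
           -32768), while `x -= -32768' would need +32768 and falls
           through to the general code.  */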
2000 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
2001 && ! TREE_STATIC (lhs)
2002 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
2003 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
2004 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
2005 {
2006 tree arg0 = TREE_OPERAND (rhs, 0);
2007 tree arg1 = TREE_OPERAND (rhs, 1);
2008 HOST_WIDE_INT min_value = -32768;
2009 HOST_WIDE_INT max_value = 32767;
2010 if (TREE_CODE (rhs) == MINUS_EXPR)
2011 {
2012 min_value++;
2013 max_value++;
2014 }
2015 else if (arg1 == lhs)
2016 {
2017 arg0 = arg1;
2018 arg1 = TREE_OPERAND (rhs, 0);
2019 }
2020 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2021 {
2022 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2023 value = TREE_INT_CST_LOW (arg1);
2024 if ((hi_value == 0 && value <= max_value)
2025 || (hi_value == -1 && value >= min_value))
2026 {
2027 if (TREE_CODE (rhs) == MINUS_EXPR)
2028 value = -value;
2029 emit_iinc (lhs, value, state);
2030 if (target != IGNORE_TARGET)
2031 emit_load (lhs, state);
2032 break;
2033 }
2034 }
2035 }
2036
2037 if (TREE_CODE (lhs) == COMPONENT_REF)
2038 {
2039 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2040 STACK_TARGET, state);
2041 offset = 1;
2042 }
2043 else if (TREE_CODE (lhs) == ARRAY_REF)
2044 {
2045 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2046 STACK_TARGET, state);
2047 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2048 STACK_TARGET, state);
2049 offset = 2;
2050 }
2051 else
2052 offset = 0;
2053
2054 /* If the rhs is a binary expression whose left operand is
2055 pointer-identical (`==') to the lhs, then we have an OP=
2056 expression. In this case we must do some special processing. */
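        /* E.g. for `a[i] += 5' the array and index are already on the
           stack; they are duplicated, the old element is fetched with the
           appropriate *aload, 5 is added, and the code at
           finish_assignment stores the sum back with *astore, without
           re-evaluating `a' or `i'.  */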
2057 if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
2058 && lhs == TREE_OPERAND (rhs, 0))
2059 {
2060 if (TREE_CODE (lhs) == COMPONENT_REF)
2061 {
2062 tree field = TREE_OPERAND (lhs, 1);
2063 if (! FIELD_STATIC (field))
2064 {
2065 /* Duplicate the object reference so we can get
2066 the field. */
2067 emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2068 NOTE_POP (1);
2069 }
2070 field_op (field, (FIELD_STATIC (field)
2071 ? OPCODE_getstatic
2072 : OPCODE_getfield),
2073 state);
2074
2075 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2076 }
2077 else if (TREE_CODE (lhs) == VAR_DECL
2078 || TREE_CODE (lhs) == PARM_DECL)
2079 {
2080 if (FIELD_STATIC (lhs))
2081 {
2082 field_op (lhs, OPCODE_getstatic, state);
2083 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2084 }
2085 else
2086 emit_load (lhs, state);
2087 }
2088 else if (TREE_CODE (lhs) == ARRAY_REF)
2089 {
2090 /* Duplicate the array and index, which are on the
2091 stack, so that we can load the old value. */
2092 emit_dup (2, 0, state);
2093 NOTE_POP (2);
2094 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2095 RESERVE (1);
2096 OP1 (jopcode);
2097 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2098 }
2099 else
2100 abort ();
2101
2102 /* This function correctly handles the case where the LHS
2103 of a binary expression is NULL_TREE. */
2104 rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2105 NULL_TREE, TREE_OPERAND (rhs, 1));
2106 }
2107
2108 generate_bytecode_insns (rhs, STACK_TARGET, state);
2109 if (target != IGNORE_TARGET)
2110 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2111 exp = lhs;
2112 }
2113 /* FALLTHROUGH */
2114
2115 finish_assignment:
2116 if (TREE_CODE (exp) == COMPONENT_REF)
2117 {
2118 tree field = TREE_OPERAND (exp, 1);
2119 if (! FIELD_STATIC (field))
2120 NOTE_POP (1);
2121 field_op (field,
2122 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2123 state);
2124
2125 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2126 }
2127 else if (TREE_CODE (exp) == VAR_DECL
2128 || TREE_CODE (exp) == PARM_DECL)
2129 {
2130 if (FIELD_STATIC (exp))
2131 {
2132 field_op (exp, OPCODE_putstatic, state);
2133 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2134 }
2135 else
2136 emit_store (exp, state);
2137 }
2138 else if (TREE_CODE (exp) == ARRAY_REF)
2139 {
2140 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2141 RESERVE (1);
2142 OP1 (jopcode);
2143 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2144 }
2145 else
2146 abort ();
2147 break;
2148 case PLUS_EXPR:
2149 jopcode = OPCODE_iadd;
2150 goto binop;
2151 case MINUS_EXPR:
2152 jopcode = OPCODE_isub;
2153 goto binop;
2154 case MULT_EXPR:
2155 jopcode = OPCODE_imul;
2156 goto binop;
2157 case TRUNC_DIV_EXPR:
2158 case RDIV_EXPR:
2159 jopcode = OPCODE_idiv;
2160 goto binop;
2161 case TRUNC_MOD_EXPR:
2162 jopcode = OPCODE_irem;
2163 goto binop;
2164 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2165 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2166 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2167 case TRUTH_AND_EXPR:
2168 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2169 case TRUTH_OR_EXPR:
2170 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2171 case TRUTH_XOR_EXPR:
2172 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2173 binop:
2174 {
2175 tree arg0 = TREE_OPERAND (exp, 0);
2176 tree arg1 = TREE_OPERAND (exp, 1);
2177 jopcode += adjust_typed_op (type, 3);
2178 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2179 {
2180 /* fold may (e.g) convert 2*x to x+x. */
2181 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2182 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2183 }
2184 else
2185 {
2186 /* ARG0 will be NULL_TREE if we're handling an `OP='
2187 expression. In this case the stack already holds the
2188 LHS. See the MODIFY_EXPR case. */
2189 if (arg0 != NULL_TREE)
2190 generate_bytecode_insns (arg0, target, state);
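            /* The shift count on the JVM stack is always an int, even for
               the long shift opcodes, so narrow ARG1 here.  */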
2191 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2192 arg1 = convert (int_type_node, arg1);
2193 generate_bytecode_insns (arg1, target, state);
2194 }
2195 /* For most binary operations, both operands and the result have the
2196 same type. Shift operations are different. Using arg1's type
2197 gets us the correct SP adjustment in all cases. */
2198 if (target == STACK_TARGET)
2199 emit_binop (jopcode, TREE_TYPE (arg1), state);
2200 break;
2201 }
2202 case TRUTH_NOT_EXPR:
2203 case BIT_NOT_EXPR:
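      /* The JVM has no bitwise-not or boolean-not opcode, so ~x is
         emitted as x ^ -1 (widened to long when necessary) and !x as
         x ^ 1.  */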
2204 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2205 if (target == STACK_TARGET)
2206 {
2207 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2208 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2209 RESERVE (2);
2210 if (is_long)
2211 OP1 (OPCODE_i2l);
2212 NOTE_PUSH (1 + is_long);
2213 OP1 (OPCODE_ixor + is_long);
2214 NOTE_POP (1 + is_long);
2215 }
2216 break;
2217 case NEGATE_EXPR:
2218 jopcode = OPCODE_ineg;
2219 jopcode += adjust_typed_op (type, 3);
2220 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2221 if (target == STACK_TARGET)
2222 emit_unop (jopcode, type, state);
2223 break;
2224 case INSTANCEOF_EXPR:
2225 {
2226 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2227 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2228 RESERVE (3);
2229 OP1 (OPCODE_instanceof);
2230 OP2 (index);
2231 }
2232 break;
2233 case SAVE_EXPR:
2234 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2235 break;
2236 case CONVERT_EXPR:
2237 case NOP_EXPR:
2238 case FLOAT_EXPR:
2239 case FIX_TRUNC_EXPR:
2240 {
2241 tree src = TREE_OPERAND (exp, 0);
2242 tree src_type = TREE_TYPE (src);
2243 tree dst_type = TREE_TYPE (exp);
2244 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2245 if (target == IGNORE_TARGET || src_type == dst_type)
2246 break;
2247 if (TREE_CODE (dst_type) == POINTER_TYPE)
2248 {
2249 if (TREE_CODE (exp) == CONVERT_EXPR)
2250 {
2251 int index = find_class_constant (&state->cpool,
2252 TREE_TYPE (dst_type));
2253 RESERVE (3);
2254 OP1 (OPCODE_checkcast);
2255 OP2 (index);
2256 }
2257 }
2258 else /* Convert numeric types. */
2259 {
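          /* The conversion opcodes are numbered so the target width can
             simply be added: i2l/i2f/i2d, l2i/l2f/l2d, f2i/f2l/f2d and
             d2i/d2l/d2f are consecutive.  E.g. OPCODE_f2i + wide_dst
             + 3 * wide_src yields f2l for float-to-long and d2i for
             double-to-int.  */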
2260 int wide_src = TYPE_PRECISION (src_type) > 32;
2261 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2262 NOTE_POP (1 + wide_src);
2263 RESERVE (1);
2264 if (TREE_CODE (dst_type) == REAL_TYPE)
2265 {
2266 if (TREE_CODE (src_type) == REAL_TYPE)
2267 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2268 else if (TYPE_PRECISION (src_type) == 64)
2269 OP1 (OPCODE_l2f + wide_dst);
2270 else
2271 OP1 (OPCODE_i2f + wide_dst);
2272 }
2273 else /* Convert to integral type. */
2274 {
2275 if (TREE_CODE (src_type) == REAL_TYPE)
2276 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2277 else if (wide_dst)
2278 OP1 (OPCODE_i2l);
2279 else if (wide_src)
2280 OP1 (OPCODE_l2i);
2281 if (TYPE_PRECISION (dst_type) < 32)
2282 {
2283 RESERVE (1);
2284 /* Already converted to int, if needed. */
2285 if (TYPE_PRECISION (dst_type) <= 8)
2286 OP1 (OPCODE_i2b);
2287 else if (TREE_UNSIGNED (dst_type))
2288 OP1 (OPCODE_i2c);
2289 else
2290 OP1 (OPCODE_i2s);
2291 }
2292 }
2293 NOTE_PUSH (1 + wide_dst);
2294 }
2295 }
2296 break;
2297
2298 case TRY_EXPR:
2299 {
2300 tree try_clause = TREE_OPERAND (exp, 0);
2301 struct jcf_block *start_label = get_jcf_label_here (state);
2302 struct jcf_block *end_label; /* End of try clause. */
2303 struct jcf_block *finished_label = gen_jcf_label (state);
2304 tree clause = TREE_OPERAND (exp, 1);
2305 if (target != IGNORE_TARGET)
2306 abort ();
2307 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2308 end_label = get_jcf_label_here (state);
2309 if (end_label == start_label)
2310 break;
2311 if (CAN_COMPLETE_NORMALLY (try_clause))
2312 emit_goto (finished_label, state);
2313 while (clause != NULL_TREE)
2314 {
2315 tree catch_clause = TREE_OPERAND (clause, 0);
2316 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2317 struct jcf_handler *handler = alloc_handler (start_label,
2318 end_label, state);
2319 if (exception_decl == NULL_TREE)
2320 handler->type = NULL_TREE;
2321 else
2322 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2323 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2324 clause = TREE_CHAIN (clause);
2325 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2326 emit_goto (finished_label, state);
2327 }
2328 define_jcf_label (finished_label, state);
2329 }
2330 break;
2331
2332 case TRY_FINALLY_EXPR:
2333 {
2334 struct jcf_block *finished_label = NULL;
2335 struct jcf_block *finally_label, *start_label, *end_label;
2336 struct jcf_handler *handler;
2337 tree try_block = TREE_OPERAND (exp, 0);
2338 tree finally = TREE_OPERAND (exp, 1);
2339 tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2340
2341 tree exception_type;
2342
2343 finally_label = gen_jcf_label (state);
2344 start_label = get_jcf_label_here (state);
2345 /* If the `finally' clause can complete normally, we emit it
2346 as a subroutine and let the other clauses call it via
2347 `jsr'. If it can't complete normally, then we simply emit
2348 `goto's directly to it. */
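      /* An illustrative sketch of the shape emitted when the finally
         clause can complete normally:
             <try body>   jsr F;  goto done;
             handler:     astore exc;  jsr F;  aload exc;  athrow;
             F:           astore ret_pc;  <finally body>;  ret ret_pc;
             done:
         The handler and local-variable bookkeeping follows below.  */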
2349 if (CAN_COMPLETE_NORMALLY (finally))
2350 {
2351 finally_label->pc = PENDING_CLEANUP_PC;
2352 finally_label->next = state->labeled_blocks;
2353 state->labeled_blocks = finally_label;
2354 state->num_finalizers++;
2355 }
2356
2357 generate_bytecode_insns (try_block, target, state);
2358
2359 if (CAN_COMPLETE_NORMALLY (finally))
2360 {
2361 if (state->labeled_blocks != finally_label)
2362 abort();
2363 state->labeled_blocks = finally_label->next;
2364 }
2365 end_label = get_jcf_label_here (state);
2366
2367 if (end_label == start_label)
2368 {
2369 state->num_finalizers--;
2370 define_jcf_label (finally_label, state);
2371 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2372 break;
2373 }
2374
2375 if (CAN_COMPLETE_NORMALLY (finally))
2376 {
2377 return_link = build_decl (VAR_DECL, NULL_TREE,
2378 return_address_type_node);
2379 finished_label = gen_jcf_label (state);
2380 }
2381
2382 if (CAN_COMPLETE_NORMALLY (try_block))
2383 {
2384 if (CAN_COMPLETE_NORMALLY (finally))
2385 {
2386 emit_jsr (finally_label, state);
2387 emit_goto (finished_label, state);
2388 }
2389 else
2390 emit_goto (finally_label, state);
2391 }
2392
2393 /* Handle exceptions. */
2394
2395 exception_type = build_pointer_type (throwable_type_node);
2396 if (CAN_COMPLETE_NORMALLY (finally))
2397 {
2398 /* We're going to generate a subroutine, so we'll need to
2399 save and restore the exception around the `jsr'. */
2400 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2401 localvar_alloc (return_link, state);
2402 }
2403 handler = alloc_handler (start_label, end_label, state);
2404 handler->type = NULL_TREE;
2405 if (CAN_COMPLETE_NORMALLY (finally))
2406 {
2407 localvar_alloc (exception_decl, state);
2408 NOTE_PUSH (1);
2409 emit_store (exception_decl, state);
2410 emit_jsr (finally_label, state);
2411 emit_load (exception_decl, state);
2412 RESERVE (1);
2413 OP1 (OPCODE_athrow);
2414 NOTE_POP (1);
2415 }
2416 else
2417 {
2418 /* We're not generating a subroutine. In this case we can
2419 simply have the exception handler pop the exception and
2420 then fall through to the `finally' block. */
2421 NOTE_PUSH (1);
2422 emit_pop (1, state);
2423 NOTE_POP (1);
2424 }
2425
2426 /* The finally block. If we're generating a subroutine, first
2427 save return PC into return_link. Otherwise, just generate
2428 the code for the `finally' block. */
2429 define_jcf_label (finally_label, state);
2430 if (CAN_COMPLETE_NORMALLY (finally))
2431 {
2432 NOTE_PUSH (1);
2433 emit_store (return_link, state);
2434 }
2435
2436 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2437 if (CAN_COMPLETE_NORMALLY (finally))
2438 {
2439 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2440 localvar_free (exception_decl, state);
2441 localvar_free (return_link, state);
2442 define_jcf_label (finished_label, state);
2443 }
2444 }
2445 break;
2446 case THROW_EXPR:
2447 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2448 RESERVE (1);
2449 OP1 (OPCODE_athrow);
2450 break;
2451 case NEW_ARRAY_INIT:
2452 {
2453 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2454 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2455 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2456 HOST_WIDE_INT length = java_array_type_length (array_type);
2457 if (target == IGNORE_TARGET)
2458 {
2459 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2460 generate_bytecode_insns (TREE_VALUE (values), target, state);
2461 break;
2462 }
2463 push_int_const (length, state);
2464 NOTE_PUSH (1);
2465 RESERVE (3);
2466 if (JPRIMITIVE_TYPE_P (element_type))
2467 {
2468 int atype = encode_newarray_type (element_type);
2469 OP1 (OPCODE_newarray);
2470 OP1 (atype);
2471 }
2472 else
2473 {
2474 int index = find_class_constant (&state->cpool,
2475 TREE_TYPE (element_type));
2476 OP1 (OPCODE_anewarray);
2477 OP2 (index);
2478 }
2479 offset = 0;
2480 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2481 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2482 {
2483 int save_SP = state->code_SP;
2484 emit_dup (1, 0, state);
2485 push_int_const (offset, state);
2486 NOTE_PUSH (1);
2487 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2488 RESERVE (1);
2489 OP1 (jopcode);
2490 state->code_SP = save_SP;
2491 }
2492 }
2493 break;
2494 case JAVA_EXC_OBJ_EXPR:
2495 NOTE_PUSH (1); /* Pushed by exception system. */
2496 break;
2497 case NEW_CLASS_EXPR:
2498 {
2499 tree class = TREE_TYPE (TREE_TYPE (exp));
2500 int need_result = target != IGNORE_TARGET;
2501 int index = find_class_constant (&state->cpool, class);
2502 RESERVE (4);
2503 OP1 (OPCODE_new);
2504 OP2 (index);
2505 if (need_result)
2506 OP1 (OPCODE_dup);
2507 NOTE_PUSH (1 + need_result);
2508 }
2509 /* ... fall through ... */
2510 case CALL_EXPR:
2511 {
2512 tree f = TREE_OPERAND (exp, 0);
2513 tree x = TREE_OPERAND (exp, 1);
2514 int save_SP = state->code_SP;
2515 int nargs;
2516 if (TREE_CODE (f) == ADDR_EXPR)
2517 f = TREE_OPERAND (f, 0);
2518 if (f == soft_newarray_node)
2519 {
2520 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2521 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2522 STACK_TARGET, state);
2523 RESERVE (2);
2524 OP1 (OPCODE_newarray);
2525 OP1 (type_code);
2526 break;
2527 }
2528 else if (f == soft_multianewarray_node)
2529 {
2530 int ndims;
2531 int idim;
2532 int index = find_class_constant (&state->cpool,
2533 TREE_TYPE (TREE_TYPE (exp)));
2534 x = TREE_CHAIN (x); /* Skip class argument. */
2535 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2536 for (idim = ndims; --idim >= 0; )
2537 {
2538 x = TREE_CHAIN (x);
2539 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2540 }
2541 RESERVE (4);
2542 OP1 (OPCODE_multianewarray);
2543 OP2 (index);
2544 OP1 (ndims);
2545 break;
2546 }
2547 else if (f == soft_anewarray_node)
2548 {
2549 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2550 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2551 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2552 RESERVE (3);
2553 OP1 (OPCODE_anewarray);
2554 OP2 (index);
2555 break;
2556 }
2557 else if (f == soft_monitorenter_node
2558 || f == soft_monitorexit_node
2559 || f == throw_node)
2560 {
2561 if (f == soft_monitorenter_node)
2562 op = OPCODE_monitorenter;
2563 else if (f == soft_monitorexit_node)
2564 op = OPCODE_monitorexit;
2565 else
2566 op = OPCODE_athrow;
2567 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2568 RESERVE (1);
2569 OP1 (op);
2570 NOTE_POP (1);
2571 break;
2572 }
2573 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2574 {
2575 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2576 }
2577 nargs = state->code_SP - save_SP;
2578 state->code_SP = save_SP;
2579 if (f == soft_fmod_node)
2580 {
2581 RESERVE (1);
2582 OP1 (OPCODE_drem);
2583 NOTE_PUSH (2);
2584 break;
2585 }
2586 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2587 NOTE_POP (1); /* Pop implicit this. */
2588 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2589 {
2590 tree context = DECL_CONTEXT (f);
2591 int index, interface = 0;
2592 RESERVE (5);
2593 if (METHOD_STATIC (f))
2594 OP1 (OPCODE_invokestatic);
2595 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2596 || METHOD_PRIVATE (f))
2597 OP1 (OPCODE_invokespecial);
2598 else
2599 {
2600 if (CLASS_INTERFACE (TYPE_NAME (context)))
2601 {
2602 tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2603 context = TREE_TYPE (TREE_TYPE (arg1));
2604 if (CLASS_INTERFACE (TYPE_NAME (context)))
2605 interface = 1;
2606 }
2607 if (interface)
2608 OP1 (OPCODE_invokeinterface);
2609 else
2610 OP1 (OPCODE_invokevirtual);
2611 }
2612 index = find_methodref_with_class_index (&state->cpool, f, context);
2613 OP2 (index);
2614 if (interface)
2615 {
2616 if (nargs <= 0)
2617 abort ();
2618
2619 OP1 (nargs);
2620 OP1 (0);
2621 }
2622 f = TREE_TYPE (TREE_TYPE (f));
2623 if (TREE_CODE (f) != VOID_TYPE)
2624 {
2625 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2626 if (target == IGNORE_TARGET)
2627 emit_pop (size, state);
2628 else
2629 NOTE_PUSH (size);
2630 }
2631 break;
2632 }
2633 }
2634 /* fall through */
2635 notimpl:
2636 default:
2637 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2638 tree_code_name [(int) TREE_CODE (exp)]);
2639 }
2640 }
2641
2642 static void
2643 perform_relocations (state)
2644 struct jcf_partial *state;
2645 {
2646 struct jcf_block *block;
2647 struct jcf_relocation *reloc;
2648 int pc;
2649 int shrink;
2650
2651 /* Before we start, the pc field of each block is an upper bound on
2652 the block's start pc (it may be less, if previous blocks need less
2653 than their maximum).
2654
2655 The minimum size of each block is in the block's chunk->size. */
2656
2657 /* First, figure out the actual locations of each block. */
2658 pc = 0;
2659 shrink = 0;
2660 for (block = state->blocks; block != NULL; block = block->next)
2661 {
2662 int block_size = block->v.chunk->size;
2663
2664 block->pc = pc;
2665
2666 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2667 Assumes relocations are in reverse order. */
2668 reloc = block->u.relocations;
2669 while (reloc != NULL
2670 && reloc->kind == OPCODE_goto_w
2671 && reloc->label->pc == block->next->pc
2672 && reloc->offset + 2 == block_size)
2673 {
2674 reloc = reloc->next;
2675 block->u.relocations = reloc;
2676 block->v.chunk->size -= 3;
2677 block_size -= 3;
2678 shrink += 3;
2679 }
2680
2681 /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2682 jump directly to X. We're careful here to avoid an infinite
2683 loop if the `goto's themselves form one. We do this
2684 optimization because we can generate a goto-to-goto for some
2685 try/finally blocks. */
2686 while (reloc != NULL
2687 && reloc->kind == OPCODE_goto_w
2688 && reloc->label != block
2689 && reloc->label->v.chunk->data != NULL
2690 && reloc->label->v.chunk->data[0] == OPCODE_goto)
2691 {
2692 /* Find the reloc for the first instruction of the
2693 destination block. */
2694 struct jcf_relocation *first_reloc;
2695 for (first_reloc = reloc->label->u.relocations;
2696 first_reloc;
2697 first_reloc = first_reloc->next)
2698 {
2699 if (first_reloc->offset == 1
2700 && first_reloc->kind == OPCODE_goto_w)
2701 {
2702 reloc->label = first_reloc->label;
2703 break;
2704 }
2705 }
2706
2707 /* If we didn't do anything, exit the loop. */
2708 if (first_reloc == NULL)
2709 break;
2710 }
2711
2712 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2713 {
2714 if (reloc->kind == SWITCH_ALIGN_RELOC)
2715 {
2716 /* We assume this is the first relocation in this block,
2717 so we know its final pc. */
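            /* The operands of tableswitch/lookupswitch must start on a
               4-byte boundary relative to the start of the method's code:
               e.g. if the opcode ends at pc 6, pad = ((6 + 3) & ~3) - 6
               = 2 zero bytes are inserted.  */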
2718 int where = pc + reloc->offset;
2719 int pad = ((where + 3) & ~3) - where;
2720 block_size += pad;
2721 }
2722 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2723 {
2724 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2725 int expand = reloc->kind > 0 ? 2 : 5;
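            /* A branch whose offset fits in signed 16 bits stays in its
               short form (kind is reset to -1 below); otherwise a goto or
               jsr grows by 2 bytes (goto -> goto_w) and a conditional by
               5 bytes (inverted ifCOND around a goto_w), matching the
               rewriting done in the copy loop further down.  */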
2726
2727 if (delta > 0)
2728 delta -= shrink;
2729 if (delta >= -32768 && delta <= 32767)
2730 {
2731 shrink += expand;
2732 reloc->kind = -1;
2733 }
2734 else
2735 block_size += expand;
2736 }
2737 }
2738 pc += block_size;
2739 }
2740
2741 for (block = state->blocks; block != NULL; block = block->next)
2742 {
2743 struct chunk *chunk = block->v.chunk;
2744 int old_size = chunk->size;
2745 int next_pc = block->next == NULL ? pc : block->next->pc;
2746 int new_size = next_pc - block->pc;
2747 unsigned char *new_ptr;
2748 unsigned char *old_buffer = chunk->data;
2749 unsigned char *old_ptr = old_buffer + old_size;
2750 if (new_size != old_size)
2751 {
2752 chunk->data = (unsigned char *)
2753 obstack_alloc (state->chunk_obstack, new_size);
2754 chunk->size = new_size;
2755 }
2756 new_ptr = chunk->data + new_size;
2757
2758 /* We do the relocations from back to front, because
2759 the relocations are in reverse order. */
2760 for (reloc = block->u.relocations; ; reloc = reloc->next)
2761 {
2762 /* new_ptr and old_ptr point into the new and old buffers,
2763 respectively. (If no relocations cause the buffer to
2764 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2765 The bytes at higher address have been copied and relocations
2766 handled; those at lower addresses remain to process. */
2767
2768 /* Lowest old index of the piece to be copied with no relocation,
2769 i.e. the high index of the first piece that does need relocation. */
2770 int start = reloc == NULL ? 0
2771 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2772 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2773 ? reloc->offset + 4
2774 : reloc->offset + 2;
2775 int32 value;
2776 int new_offset;
2777 int n = (old_ptr - old_buffer) - start;
2778 new_ptr -= n;
2779 old_ptr -= n;
2780 if (n > 0)
2781 memcpy (new_ptr, old_ptr, n);
2782 if (old_ptr == old_buffer)
2783 break;
2784
2785 new_offset = new_ptr - chunk->data;
2786 new_offset -= (reloc->kind == -1 ? 2 : 4);
2787 if (reloc->kind == 0)
2788 {
2789 old_ptr -= 4;
2790 value = GET_u4 (old_ptr);
2791 }
2792 else if (reloc->kind == BLOCK_START_RELOC)
2793 {
2794 old_ptr -= 4;
2795 value = 0;
2796 new_offset = 0;
2797 }
2798 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2799 {
2800 int where = block->pc + reloc->offset;
2801 int pad = ((where + 3) & ~3) - where;
2802 while (--pad >= 0)
2803 *--new_ptr = 0;
2804 continue;
2805 }
2806 else
2807 {
2808 old_ptr -= 2;
2809 value = GET_u2 (old_ptr);
2810 }
2811 value += reloc->label->pc - (block->pc + new_offset);
2812 *--new_ptr = (unsigned char) value; value >>= 8;
2813 *--new_ptr = (unsigned char) value; value >>= 8;
2814 if (reloc->kind != -1)
2815 {
2816 *--new_ptr = (unsigned char) value; value >>= 8;
2817 *--new_ptr = (unsigned char) value;
2818 }
2819 if (reloc->kind > BLOCK_START_RELOC)
2820 {
2821 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2822 --old_ptr;
2823 *--new_ptr = reloc->kind;
2824 }
2825 else if (reloc->kind < -1)
2826 {
2827 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2828 --old_ptr;
2829 *--new_ptr = OPCODE_goto_w;
2830 *--new_ptr = 3;
2831 *--new_ptr = 0;
2832 *--new_ptr = - reloc->kind;
2833 }
2834 }
2835 if (new_ptr != chunk->data)
2836 abort ();
2837 }
2838 state->code_length = pc;
2839 }
2840
2841 static void
2842 init_jcf_state (state, work)
2843 struct jcf_partial *state;
2844 struct obstack *work;
2845 {
2846 state->chunk_obstack = work;
2847 state->first = state->chunk = NULL;
2848 CPOOL_INIT (&state->cpool);
2849 BUFFER_INIT (&state->localvars);
2850 BUFFER_INIT (&state->bytecode);
2851 }
2852
2853 static void
2854 init_jcf_method (state, method)
2855 struct jcf_partial *state;
2856 tree method;
2857 {
2858 state->current_method = method;
2859 state->blocks = state->last_block = NULL;
2860 state->linenumber_count = 0;
2861 state->first_lvar = state->last_lvar = NULL;
2862 state->lvar_count = 0;
2863 state->labeled_blocks = NULL;
2864 state->code_length = 0;
2865 BUFFER_RESET (&state->bytecode);
2866 BUFFER_RESET (&state->localvars);
2867 state->code_SP = 0;
2868 state->code_SP_max = 0;
2869 state->handlers = NULL;
2870 state->last_handler = NULL;
2871 state->num_handlers = 0;
2872 state->num_finalizers = 0;
2873 state->return_value_decl = NULL_TREE;
2874 }
2875
2876 static void
2877 release_jcf_state (state)
2878 struct jcf_partial *state;
2879 {
2880 CPOOL_FINISH (&state->cpool);
2881 obstack_free (state->chunk_obstack, state->first);
2882 }
2883
2884 /* Generate and return a list of chunks containing the class CLAS
2885 in the .class file representation. The list can be written to a
2886 .class file using write_chunks. Chunks are allocated from STATE's chunk_obstack. */
2887
2888 static GTY(()) tree SourceFile_node;
2889 static struct chunk *
2890 generate_classfile (clas, state)
2891 tree clas;
2892 struct jcf_partial *state;
2893 {
2894 struct chunk *cpool_chunk;
2895 const char *source_file, *s;
2896 char *ptr;
2897 int i;
2898 char *fields_count_ptr;
2899 int fields_count = 0;
2900 char *methods_count_ptr;
2901 int methods_count = 0;
2902 tree part;
2903 int total_supers
2904 = clas == object_type_node ? 0
2905 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2906
2907 ptr = append_chunk (NULL, 8, state);
2908 PUT4 (0xCafeBabe); /* Magic number */
2909 PUT2 (3); /* Minor version */
2910 PUT2 (45); /* Major version */
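   /* Class-file version 45.3 is the original JDK 1.0/1.1 format, which
      any later virtual machine will also accept.  */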
2911
2912 append_chunk (NULL, 0, state);
2913 cpool_chunk = state->chunk;
2914
2915 /* Next allocate the chunk containing access_flags through fields_count. */
2916 if (clas == object_type_node)
2917 i = 10;
2918 else
2919 i = 8 + 2 * total_supers;
2920 ptr = append_chunk (NULL, i, state);
2921 i = get_access_flags (TYPE_NAME (clas));
2922 if (! (i & ACC_INTERFACE))
2923 i |= ACC_SUPER;
2924 PUT2 (i); /* access_flags */
2925 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2926 if (clas == object_type_node)
2927 {
2928 PUT2(0); /* super_class */
2929 PUT2(0); /* interfaces_count */
2930 }
2931 else
2932 {
2933 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2934 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2935 int j = find_class_constant (&state->cpool, base);
2936 PUT2 (j); /* super_class */
2937 PUT2 (total_supers - 1); /* interfaces_count */
2938 for (i = 1; i < total_supers; i++)
2939 {
2940 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2941 j = find_class_constant (&state->cpool, base);
2942 PUT2 (j);
2943 }
2944 }
2945 fields_count_ptr = ptr;
2946
2947 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2948 {
2949 int have_value, attr_count = 0;
2950 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2951 continue;
2952 ptr = append_chunk (NULL, 8, state);
2953 i = get_access_flags (part); PUT2 (i);
2954 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2955 i = find_utf8_constant (&state->cpool,
2956 build_java_signature (TREE_TYPE (part)));
2957 PUT2(i);
2958 have_value = DECL_INITIAL (part) != NULL_TREE
2959 && FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2960 && FIELD_FINAL (part)
2961 && (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2962 || TREE_TYPE (part) == string_ptr_type_node);
2963 if (have_value)
2964 attr_count++;
2965
2966 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part) || FIELD_SYNTHETIC (part))
2967 attr_count++;
2968
2969 PUT2 (attr_count); /* attributes_count */
2970 if (have_value)
2971 {
2972 tree init = DECL_INITIAL (part);
2973 static tree ConstantValue_node = NULL_TREE;
2974 if (TREE_TYPE (part) != TREE_TYPE (init))
2975 fatal_error ("field initializer type mismatch");
2976 ptr = append_chunk (NULL, 8, state);
2977 if (ConstantValue_node == NULL_TREE)
2978 ConstantValue_node = get_identifier ("ConstantValue");
2979 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2980 PUT2 (i); /* attribute_name_index */
2981 PUT4 (2); /* attribute_length */
2982 i = find_constant_index (init, state); PUT2 (i);
2983 }
2984 /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2985 fields and other fields which need it. */
2986 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
2987 || FIELD_SYNTHETIC (part))
2988 ptr = append_synthetic_attribute (state);
2989 fields_count++;
2990 }
2991 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2992
2993 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2994 PUT2 (0);
2995
2996 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2997 {
2998 struct jcf_block *block;
2999 tree function_body = DECL_FUNCTION_BODY (part);
3000 tree body = function_body == NULL_TREE ? NULL_TREE
3001 : BLOCK_EXPR_BODY (function_body);
3002 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3003 : DECL_NAME (part);
3004 tree type = TREE_TYPE (part);
3005 tree save_function = current_function_decl;
3006 int synthetic_p = 0;
3007 current_function_decl = part;
3008 ptr = append_chunk (NULL, 8, state);
3009 i = get_access_flags (part); PUT2 (i);
3010 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3011 i = find_utf8_constant (&state->cpool, build_java_signature (type));
3012 PUT2 (i);
3013 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3014
3015 /* Make room for the Synthetic attribute (of zero length.) */
3016 if (DECL_FINIT_P (part)
3017 || DECL_INSTINIT_P (part)
3018 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3019 || TYPE_DOT_CLASS (clas) == part)
3020 {
3021 i++;
3022 synthetic_p = 1;
3023 }
3024
3025 PUT2 (i); /* attributes_count */
3026
3027 if (synthetic_p)
3028 ptr = append_synthetic_attribute (state);
3029
3030 if (body != NULL_TREE)
3031 {
3032 int code_attributes_count = 0;
3033 static tree Code_node = NULL_TREE;
3034 tree t;
3035 char *attr_len_ptr;
3036 struct jcf_handler *handler;
3037 if (Code_node == NULL_TREE)
3038 Code_node = get_identifier ("Code");
3039 ptr = append_chunk (NULL, 14, state);
3040 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
3041 attr_len_ptr = ptr;
3042 init_jcf_method (state, part);
3043 get_jcf_label_here (state); /* Force a first block. */
3044 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3045 localvar_alloc (t, state);
3046 generate_bytecode_insns (body, IGNORE_TARGET, state);
3047 if (CAN_COMPLETE_NORMALLY (body))
3048 {
3049 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3050 abort();
3051 RESERVE (1);
3052 OP1 (OPCODE_return);
3053 }
3054 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
3055 localvar_free (t, state);
3056 if (state->return_value_decl != NULL_TREE)
3057 localvar_free (state->return_value_decl, state);
3058 finish_jcf_block (state);
3059 perform_relocations (state);
3060
3061 ptr = attr_len_ptr;
3062 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
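        /* Code attribute body: max_stack (2) + max_locals (2)
           + code_length (4) = 8 bytes, then the bytecode itself, then
           exception_table_length (2) and attributes_count (2) = 4 bytes
           plus 8 bytes per exception-table entry; the LineNumberTable
           and LocalVariableTable sizes are added just below.  */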
3063 if (state->linenumber_count > 0)
3064 {
3065 code_attributes_count++;
3066 i += 8 + 4 * state->linenumber_count;
3067 }
3068 if (state->lvar_count > 0)
3069 {
3070 code_attributes_count++;
3071 i += 8 + 10 * state->lvar_count;
3072 }
3073 UNSAFE_PUT4 (i); /* attribute_length */
3074 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
3075 UNSAFE_PUT2 (localvar_max); /* max_locals */
3076 UNSAFE_PUT4 (state->code_length);
3077
3078 /* Emit the exception table. */
3079 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3080 PUT2 (state->num_handlers); /* exception_table_length */
3081 handler = state->handlers;
3082 for (; handler != NULL; handler = handler->next)
3083 {
3084 int type_index;
3085 PUT2 (handler->start_label->pc);
3086 PUT2 (handler->end_label->pc);
3087 PUT2 (handler->handler_label->pc);
3088 if (handler->type == NULL_TREE)
3089 type_index = 0;
3090 else
3091 type_index = find_class_constant (&state->cpool,
3092 handler->type);
3093 PUT2 (type_index);
3094 }
3095
3096 ptr = append_chunk (NULL, 2, state);
3097 PUT2 (code_attributes_count);
3098
3099 /* Write the LineNumberTable attribute. */
3100 if (state->linenumber_count > 0)
3101 {
3102 static tree LineNumberTable_node = NULL_TREE;
3103 ptr = append_chunk (NULL,
3104 8 + 4 * state->linenumber_count, state);
3105 if (LineNumberTable_node == NULL_TREE)
3106 LineNumberTable_node = get_identifier ("LineNumberTable");
3107 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3108 PUT2 (i); /* attribute_name_index */
3109 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3110 i = state->linenumber_count; PUT2 (i);
3111 for (block = state->blocks; block != NULL; block = block->next)
3112 {
3113 int line = block->linenumber;
3114 if (line > 0)
3115 {
3116 PUT2 (block->pc);
3117 PUT2 (line);
3118 }
3119 }
3120 }
3121
3122 /* Write the LocalVariableTable attribute. */
3123 if (state->lvar_count > 0)
3124 {
3125 static tree LocalVariableTable_node = NULL_TREE;
3126 struct localvar_info *lvar = state->first_lvar;
3127 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3128 if (LocalVariableTable_node == NULL_TREE)
3129 LocalVariableTable_node = get_identifier("LocalVariableTable");
3130 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3131 PUT2 (i); /* attribute_name_index */
3132 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3133 i = state->lvar_count; PUT2 (i);
3134 for ( ; lvar != NULL; lvar = lvar->next)
3135 {
3136 tree name = DECL_NAME (lvar->decl);
3137 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3138 i = lvar->start_label->pc; PUT2 (i);
3139 i = lvar->end_label->pc - i; PUT2 (i);
3140 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3141 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3142 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3143 }
3144 }
3145 }
3146 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3147 {
3148 tree t = DECL_FUNCTION_THROWS (part);
3149 int throws_count = list_length (t);
3150 static tree Exceptions_node = NULL_TREE;
3151 if (Exceptions_node == NULL_TREE)
3152 Exceptions_node = get_identifier ("Exceptions");
3153 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3154 i = find_utf8_constant (&state->cpool, Exceptions_node);
3155 PUT2 (i); /* attribute_name_index */
3156 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3157 i = throws_count; PUT2 (i);
3158 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3159 {
3160 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3161 PUT2 (i);
3162 }
3163 }
3164 methods_count++;
3165 current_function_decl = save_function;
3166 }
3167 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3168
3169 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3170 for (s = source_file; ; s++)
3171 {
3172 char ch = *s;
3173 if (ch == '\0')
3174 break;
3175 if (ch == '/' || ch == '\\')
3176 source_file = s+1;
3177 }
3178 ptr = append_chunk (NULL, 10, state);
3179
3180 i = 1; /* Source file always exists as an attribute */
3181 if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3182 i++;
3183 if (clas == object_type_node)
3184 i++;
3185 PUT2 (i); /* attributes_count */
3186
3187 /* Generate the SourceFile attribute. */
3188 if (SourceFile_node == NULL_TREE)
3189 {
3190 SourceFile_node = get_identifier ("SourceFile");
3191 }
3192
3193 i = find_utf8_constant (&state->cpool, SourceFile_node);
3194 PUT2 (i); /* attribute_name_index */
3195 PUT4 (2);
3196 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3197 PUT2 (i);
3198 append_gcj_attribute (state, clas);
3199 append_innerclasses_attribute (state, clas);
3200
3201 /* Now finally generate the contents of the constant pool chunk. */
3202 i = count_constant_pool_bytes (&state->cpool);
3203 ptr = obstack_alloc (state->chunk_obstack, i);
3204 cpool_chunk->data = ptr;
3205 cpool_chunk->size = i;
3206 write_constant_pool (&state->cpool, ptr, i);
3207 return state->first;
3208 }
3209
3210 static GTY(()) tree Synthetic_node;
3211 static unsigned char *
3212 append_synthetic_attribute (state)
3213 struct jcf_partial *state;
3214 {
3215 unsigned char *ptr = append_chunk (NULL, 6, state);
3216 int i;
3217
3218 if (Synthetic_node == NULL_TREE)
3219 {
3220 Synthetic_node = get_identifier ("Synthetic");
3221 }
3222 i = find_utf8_constant (&state->cpool, Synthetic_node);
3223 PUT2 (i); /* Attribute string index */
3224 PUT4 (0); /* Attribute length */
3225
3226 return ptr;
3227 }
3228
3229 static void
3230 append_gcj_attribute (state, class)
3231 struct jcf_partial *state;
3232 tree class;
3233 {
3234 unsigned char *ptr;
3235 int i;
3236
3237 if (class != object_type_node)
3238 return;
3239
3240 ptr = append_chunk (NULL, 6, state); /* 2+4 */
3241 i = find_utf8_constant (&state->cpool,
3242 get_identifier ("gnu.gcj.gcj-compiled"));
3243 PUT2 (i); /* Attribute string index */
3244 PUT4 (0); /* Attribute length */
3245 }
3246
3247 static tree InnerClasses_node;
3248 static void
3249 append_innerclasses_attribute (state, class)
3250 struct jcf_partial *state;
3251 tree class;
3252 {
3253 tree orig_decl = TYPE_NAME (class);
3254 tree current, decl;
3255 int length = 0, i;
3256 unsigned char *ptr, *length_marker, *number_marker;
3257
3258 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3259 return;
3260
3261 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3262
3263 if (InnerClasses_node == NULL_TREE)
3264 {
3265 InnerClasses_node = get_identifier ("InnerClasses");
3266 }
3267 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3268 PUT2 (i);
3269 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3270 number_marker = ptr; PUT2 (0); /* number_of_classes, to be later patched */
3271
3272 /* Generate the entries: all inner classes visible from the one we are
3273 processing: itself, its enclosing classes, and the ones it declares. */
3274 while (class && INNER_CLASS_TYPE_P (class))
3275 {
3276 const char *n;
3277
3278 decl = TYPE_NAME (class);
3279 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3280 IDENTIFIER_LENGTH (DECL_NAME (decl));
3281
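      /* N now points just past the mangled class name, e.g. "Outer$Inner";
         back up to the character after the last `$' to recover the simple
         inner-class name ("Inner").  */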
3282 while (n[-1] != '$')
3283 n--;
3284 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3285 length++;
3286
3287 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3288 }
3289
3290 decl = orig_decl;
3291 for (current = DECL_INNER_CLASS_LIST (decl);
3292 current; current = TREE_CHAIN (current))
3293 {
3294 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3295 TREE_VALUE (current));
3296 length++;
3297 }
3298
3299 ptr = length_marker; PUT4 (8*length+2);
3300 ptr = number_marker; PUT2 (length);
3301 }
3302
3303 static void
3304 append_innerclasses_attribute_entry (state, decl, name)
3305 struct jcf_partial *state;
3306 tree decl, name;
3307 {
3308 int icii, icaf;
3309 int ocii = 0, ini = 0;
3310 unsigned char *ptr = append_chunk (NULL, 8, state);
3311
3312 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3313
3314 /* Sun's implementation seems to set ocii to 0 for anonymous inner
3315 classes (which aren't considered members of the class they're
3316 in). The specification says that if the class is anonymous,
3317 inner_name_index must be zero. */
3318 if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3319 {
3320 ocii = find_class_constant (&state->cpool,
3321 TREE_TYPE (DECL_CONTEXT (decl)));
3322 ini = find_utf8_constant (&state->cpool, name);
3323 }
3324 icaf = get_access_flags (decl);
3325
3326 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3327 }
3328
3329 static char *
3330 make_class_file_name (clas)
3331 tree clas;
3332 {
3333 const char *dname, *cname, *slash;
3334 char *r;
3335 struct stat sb;
3336
3337 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3338 "", '.', DIR_SEPARATOR,
3339 ".class"));
3340 if (jcf_write_base_directory == NULL)
3341 {
3342 /* Make sure we put the class file into the .java file's
3343 directory, and not into some subdirectory thereof. */
3344 char *t;
3345 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3346 slash = strrchr (dname, DIR_SEPARATOR);
3347 if (! slash)
3348 {
3349 dname = ".";
3350 slash = dname + 1;
3351 }
3352 t = strrchr (cname, DIR_SEPARATOR);
3353 if (t)
3354 cname = t + 1;
3355 }
3356 else
3357 {
3358 dname = jcf_write_base_directory;
3359 slash = dname + strlen (dname);
3360 }
3361
3362 r = xmalloc (slash - dname + strlen (cname) + 2);
3363 strncpy (r, dname, slash - dname);
3364 r[slash - dname] = DIR_SEPARATOR;
3365 strcpy (&r[slash - dname + 1], cname);
3366
3367 /* We try to make new directories when we need them. We only do
3368 this for directories which "might not" exist. For instance, we
3369 assume the `-d' directory exists, but we don't assume that any
3370 subdirectory below it exists. It might be worthwhile to keep
3371 track of which directories we've created to avoid gratuitous
3372 stat()s. */
3373 dname = r + (slash - dname) + 1;
3374 while (1)
3375 {
3376 char *s = strchr (dname, DIR_SEPARATOR);
3377 if (s == NULL)
3378 break;
3379 *s = '\0';
3380 if (stat (r, &sb) == -1
3381 /* Try to make it. */
3382 && mkdir (r, 0755) == -1)
3383 fatal_io_error ("can't create directory %s", r);
3384
3385 *s = DIR_SEPARATOR;
3386 /* Skip consecutive separators. */
3387 for (dname = s + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3388 ;
3389 }
3390
3391 return r;
3392 }
3393
3394 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3395 The output .class file name is make_class_file_name(CLAS). */
3396
3397 void
3398 write_classfile (clas)
3399 tree clas;
3400 {
3401 struct obstack *work = &temporary_obstack;
3402 struct jcf_partial state[1];
3403 char *class_file_name = make_class_file_name (clas);
3404 struct chunk *chunks;
3405
3406 if (class_file_name != NULL)
3407 {
3408 FILE *stream;
3409 char *temporary_file_name;
3410
3411 /* The .class file is initially written to a ".tmp" file so that
3412 if multiple instances of the compiler are running at once
3413 they do not see partially formed class files. */
3414 temporary_file_name = concat (class_file_name, ".tmp", NULL);
3415 stream = fopen (temporary_file_name, "wb");
3416 if (stream == NULL)
3417 fatal_io_error ("can't open %s for writing", temporary_file_name);
3418
3419 jcf_dependency_add_target (class_file_name);
3420 init_jcf_state (state, work);
3421 chunks = generate_classfile (clas, state);
3422 write_chunks (stream, chunks);
3423 if (fclose (stream))
3424 fatal_io_error ("error closing %s", temporary_file_name);
3425
3426 /* If a file named by the string pointed to by `new' exists
3427 prior to the call to the `rename' function, the behaviour
3428 is implementation-defined. ISO 9899-1990 7.9.4.2.
3429
3430 For example, on Win32 with MSVCRT, it is an error. */
3431
3432 unlink (class_file_name);
3433
3434 if (rename (temporary_file_name, class_file_name) == -1)
3435 {
3436 remove (temporary_file_name);
3437 fatal_io_error ("can't create %s", class_file_name);
3438 }
3439 free (temporary_file_name);
3440 free (class_file_name);
3441 }
3442 release_jcf_state (state);
3443 }
3444
3445 /* TODO:
3446 string concatenation
3447 synchronized statement
3448 */
3449
3450 #include "gt-java-jcf-write.h"