gcc/java/jcf-write.c
1 /* Write out a Java(TM) class file.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING. If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "java-tree.h"
29 #include "obstack.h"
30 #undef AND
31 #include "rtl.h"
32 #include "flags.h"
33 #include "java-opcodes.h"
34 #include "parse.h" /* for BLOCK_EXPR_BODY */
35 #include "buffer.h"
36 #include "toplev.h"
37
38 #ifndef DIR_SEPARATOR
39 #define DIR_SEPARATOR '/'
40 #endif
41
42 extern struct obstack temporary_obstack;
43
44 /* Base directory in which `.class' files should be written.
45 NULL means to put the file into the same directory as the
46 corresponding .java file. */
47 char *jcf_write_base_directory = NULL;
48
49 /* Make sure bytecode.data is big enough for at least N more bytes. */
50
51 #define RESERVE(N) \
52 do { CHECK_OP(state); \
53 if (state->bytecode.ptr + (N) > state->bytecode.limit) \
54 buffer_grow (&state->bytecode, N); } while (0)
55
56 /* Add a 1-byte instruction/operand I to bytecode.data,
57 assuming space has already been RESERVE'd. */
58
59 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
60
61 /* Like OP1, but I is a 2-byte big endian integer. */
62
63 #define OP2(I) \
64 do { int _i = (I); OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
65
66 /* Like OP1, but I is a 4-byte big endian integer. */
67
68 #define OP4(I) \
69 do { int _i = (I); OP1 (_i >> 24); OP1 (_i >> 16); \
70 OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
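/* For example, after RESERVE (3), OP1 (OPCODE_sipush) followed by
   OP2 (1000) appends the bytes 0x11 0x03 0xE8 to state->bytecode;
   multi-byte operands come out high byte first (big-endian), as the
   class file format requires.  */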
71
72 /* Macro to call each time we push I words on the JVM stack. */
73
74 #define NOTE_PUSH(I) \
75 do { state->code_SP += (I); \
76 if (state->code_SP > state->code_SP_max) \
77 state->code_SP_max = state->code_SP; } while (0)
78
79 /* Macro to call each time we pop I words from the JVM stack. */
80
81 #define NOTE_POP(I) \
82 do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
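/* E.g. loading a long local pushes 2 words (NOTE_PUSH (2) in emit_load)
   and a matching store pops them again (NOTE_POP (2) in emit_store);
   code_SP_max is the high-water mark that becomes the method's max_stack.  */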
83
84 /* A chunk or segment of a .class file. */
85
86 struct chunk
87 {
88 /* The next segment of this .class file. */
89 struct chunk *next;
90
91 /* The actual data in this segment to be written to the .class file. */
92 unsigned char *data;
93
94 /* The size of the segment to be written to the .class file. */
95 int size;
96 };
97
98 #define PENDING_CLEANUP_PC (-3)
99 #define PENDING_EXIT_PC (-2)
100 #define UNDEFINED_PC (-1)
101
102 /* Each "block" represents a label plus the bytecode instructions following.
103 There may be branches out of the block, but no incoming jumps, except
104 to the beginning of the block.
105
106 If (pc < 0), the jcf_block is not an actual block (i.e. it has no
107 associated code yet), but it is an undefined label.
108 */
109
110 struct jcf_block
111 {
112 /* For blocks that are defined, the next block (in pc order).
113 For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
114 or a cleanup expression (from a WITH_CLEANUP_EXPR),
115 this is the next (outer) such end label, in a stack headed by
116 labeled_blocks in jcf_partial. */
117 struct jcf_block *next;
118
119 /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
120 pc is PENDING_EXIT_PC.
121 In the not-yet-defined end label for pending cleanup subroutine,
122 pc is PENDING_CLEANUP_PC.
123 For other not-yet-defined labels, pc is UNDEFINED_PC.
124
125 If the label has been defined:
126 Until perform_relocations is finished, this is the maximum possible
127 value of the bytecode offset at the beginning of this block.
128 After perform_relocations, it is the actual offset (pc). */
129 int pc;
130
131 int linenumber;
132
133 /* After finish_jcf_block is called, the actual instructions contained in this block.
134 Before then it is NULL, and the instructions are in state->bytecode. */
135 union {
136 struct chunk *chunk;
137
138 /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
139 covered by the cleanup. */
140 struct jcf_block *start_label;
141 } v;
142
143 union {
144 /* Set of relocations (in reverse offset order) for this block. */
145 struct jcf_relocation *relocations;
146
147 /* If this block is that of the not-yet-defined end label of
148 a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
149 If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
150 tree labeled_block;
151 } u;
152 };
153
154 /* A "relocation" type for the 0-3 bytes of padding at the start
155 of a tableswitch or a lookupswitch. */
156 #define SWITCH_ALIGN_RELOC 4
157
158 /* A relocation type for the labels in a tableswitch or a lookupswitch;
159 these are relative to the start of the instruction, but (due to
160 the 0-3 bytes of padding), we don't know the offset before relocation. */
161 #define BLOCK_START_RELOC 1
162
163 struct jcf_relocation
164 {
165 /* Next relocation for the current jcf_block. */
166 struct jcf_relocation *next;
167
168 /* The (byte) offset within the current block that needs to be relocated. */
169 HOST_WIDE_INT offset;
170
171 /* 0 if offset is a 4-byte relative offset.
172 4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
173 for proper alignment in tableswitch/lookupswitch instructions.
174 1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
175 to the start of the containing block.
176 -1 if offset is a 2-byte relative offset.
177 < -1 if offset is the address of an instruction with a 2-byte offset
178 that does not have a corresponding 4-byte offset version, in which
179 case the absolute value of kind is the inverted opcode.
180 > 4 if offset is the address of an instruction (such as jsr) with a
181 2-byte offset that does have a corresponding 4-byte offset version,
182 in which case kind is the opcode of the 4-byte version (such as jsr_w). */
183 int kind;
184
185 /* The label the relocation wants to actually transfer to. */
186 struct jcf_block *label;
187 };
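/* Examples of the encoding above: emit_goto uses kind OPCODE_goto_w
   (> 4, since the 2-byte goto can simply be widened to goto_w),
   emit_jsr uses OPCODE_jsr_w, and emit_if uses minus the inverted
   conditional opcode (< -1), so a conditional branch that ends up out
   of 16-bit range can be rewritten during perform_relocations as the
   inverted test jumping over a goto_w.  */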
188
189 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
190 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
191
192 /* State for single catch clause. */
193
194 struct jcf_handler
195 {
196 struct jcf_handler *next;
197
198 struct jcf_block *start_label;
199 struct jcf_block *end_label;
200 struct jcf_block *handler_label;
201
202 /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
203 tree type;
204 };
205
206 /* State for the current switch statement. */
207
208 struct jcf_switch_state
209 {
210 struct jcf_switch_state *prev;
211 struct jcf_block *default_label;
212
213 struct jcf_relocation *cases;
214 int num_cases;
215 HOST_WIDE_INT min_case, max_case;
216 };
217
218 /* This structure is used to contain the various pieces that will
219 become a .class file. */
220
221 struct jcf_partial
222 {
223 struct chunk *first;
224 struct chunk *chunk;
225 struct obstack *chunk_obstack;
226 tree current_method;
227
228 /* List of basic blocks for the current method. */
229 struct jcf_block *blocks;
230 struct jcf_block *last_block;
231
232 struct localvar_info *first_lvar;
233 struct localvar_info *last_lvar;
234 int lvar_count;
235
236 CPool cpool;
237
238 int linenumber_count;
239
240 /* Until perform_relocations, this is an upper bound on the number
241 of bytes (so far) in the instructions for the current method. */
242 int code_length;
243
244 /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
245 struct jcf_block *labeled_blocks;
246
247 /* The current stack size (stack pointer) in the current method. */
248 int code_SP;
249
250 /* The largest extent of stack size (stack pointer) in the current method. */
251 int code_SP_max;
252
253 /* Contains a mapping from local var slot number to localvar_info. */
254 struct buffer localvars;
255
256 /* The buffer allocated for bytecode for the current jcf_block. */
257 struct buffer bytecode;
258
259 /* Chain of exception handlers for the current method. */
260 struct jcf_handler *handlers;
261
262 /* Last element in handlers chain. */
263 struct jcf_handler *last_handler;
264
265 /* Number of exception handlers for the current method. */
266 int num_handlers;
267
268 /* Number of finalizers we are currently nested within. */
269 int num_finalizers;
270
271 /* If non-NULL, use this for the return value. */
272 tree return_value_decl;
273
274 /* Information about the current switch statement. */
275 struct jcf_switch_state *sw_state;
276 };
277
278 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
279 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
280 int, struct obstack *));
281 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
282 struct jcf_partial *));
283 static void append_chunk_copy PARAMS ((unsigned char *, int,
284 struct jcf_partial *));
285 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
286 static void finish_jcf_block PARAMS ((struct jcf_partial *));
287 static void define_jcf_label PARAMS ((struct jcf_block *,
288 struct jcf_partial *));
289 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
290 static void put_linenumber PARAMS ((int, struct jcf_partial *));
291 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
292 static void localvar_free PARAMS ((tree, struct jcf_partial *));
293 static int get_access_flags PARAMS ((tree));
294 static void write_chunks PARAMS ((FILE *, struct chunk *));
295 static int adjust_typed_op PARAMS ((tree, int));
296 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
297 struct jcf_block *, int,
298 struct jcf_partial *));
299 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
300 static void perform_relocations PARAMS ((struct jcf_partial *));
301 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
302 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
303 static void release_jcf_state PARAMS ((struct jcf_partial *));
304 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
305 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
306 struct jcf_block *,
307 struct jcf_partial *));
308 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
309 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
310 struct jcf_partial *));
311 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
312 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
313 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
314 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
315 struct jcf_partial *));
316 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
317 struct jcf_partial *));
318 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
319 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
320 struct jcf_partial *));
321 static void field_op PARAMS ((tree, int, struct jcf_partial *));
322 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
323 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
324 static void emit_pop PARAMS ((int, struct jcf_partial *));
325 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
326 static void emit_load PARAMS ((tree, struct jcf_partial *));
327 static void emit_store PARAMS ((tree, struct jcf_partial *));
328 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
329 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
330 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
331 struct jcf_partial *));
332 static void emit_switch_reloc PARAMS ((struct jcf_block *,
333 struct jcf_partial *));
334 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
335 struct jcf_partial *));
336 static void emit_if PARAMS ((struct jcf_block *, int, int,
337 struct jcf_partial *));
338 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
339 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
340 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
341 static char *make_class_file_name PARAMS ((tree));
342 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
343 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
344 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
345
346 /* Utility macros for appending (big-endian) data to a buffer.
347 We assume a local variable 'ptr' points to where we want to
348 write next, and we assume enough space has been allocated. */
349
350 #ifdef ENABLE_CHECKING
351 int
352 CHECK_PUT(ptr, state, i)
353 void *ptr;
354 struct jcf_partial *state;
355 int i;
356 {
357 if (ptr < state->chunk->data
358 || (char*)ptr + i > state->chunk->data + state->chunk->size)
359 fatal ("internal error - CHECK_PUT failed");
360 return 0;
361 }
362 #else
363 #define CHECK_PUT(PTR, STATE, I) ((void)0)
364 #endif
365
366 #define PUT1(X) (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
367 #define PUT2(X) (PUT1((X) >> 8), PUT1((X) & 0xFF))
368 #define PUT4(X) (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
369 #define PUTN(P, N) (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
370
371 /* There are some cases below where CHECK_PUT is guaranteed to fail.
372 Use the following macros in those specific cases. */
373 #define UNSAFE_PUT1(X) (*ptr++ = (X))
374 #define UNSAFE_PUT2(X) (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
375 #define UNSAFE_PUT4(X) (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
376 #define UNSAFE_PUTN(P, N) (memcpy(ptr, P, N), ptr += (N))
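/* Typical usage pattern (an illustrative sketch, not code quoted from
   generate_classfile below; minor_version/major_version are made-up names):

     ptr = append_chunk (NULL, 8, state);   -- reserve 8 bytes
     PUT4 (0xCAFEBABE);                     -- class file magic
     PUT2 (minor_version);
     PUT2 (major_version);

   PUT2 and PUT4 expand into successive PUT1s, so multi-byte values are
   written big-endian, matching the OP2/OP4 macros above.  */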
377
378 \f
379 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
380 Set the data and size fields to DATA and SIZE, respectively.
381 However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
382
383 static struct chunk *
384 alloc_chunk (last, data, size, work)
385 struct chunk *last;
386 unsigned char *data;
387 int size;
388 struct obstack *work;
389 {
390 struct chunk *chunk = (struct chunk *)
391 obstack_alloc (work, sizeof(struct chunk));
392
393 if (data == NULL && size > 0)
394 data = obstack_alloc (work, size);
395
396 chunk->next = NULL;
397 chunk->data = data;
398 chunk->size = size;
399 if (last != NULL)
400 last->next = chunk;
401 return chunk;
402 }
403
404 #ifdef ENABLE_CHECKING
405 int
406 CHECK_OP(struct jcf_partial *state)
407 {
408 if (state->bytecode.ptr > state->bytecode.limit)
409 {
410 fatal("internal error - CHECK_OP failed");
411 }
412 return 0;
413 }
414 #else
415 #define CHECK_OP(STATE) ((void)0)
416 #endif
417
418 static unsigned char *
419 append_chunk (data, size, state)
420 unsigned char *data;
421 int size;
422 struct jcf_partial *state;
423 {
424 state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
425 if (state->first == NULL)
426 state->first = state->chunk;
427 return state->chunk->data;
428 }
429
430 static void
431 append_chunk_copy (data, size, state)
432 unsigned char *data;
433 int size;
434 struct jcf_partial *state;
435 {
436 unsigned char *ptr = append_chunk (NULL, size, state);
437 memcpy (ptr, data, size);
438 }
439 \f
440 static struct jcf_block *
441 gen_jcf_label (state)
442 struct jcf_partial *state;
443 {
444 struct jcf_block *block = (struct jcf_block *)
445 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
446 block->next = NULL;
447 block->linenumber = -1;
448 block->pc = UNDEFINED_PC;
449 return block;
450 }
451
452 static void
453 finish_jcf_block (state)
454 struct jcf_partial *state;
455 {
456 struct jcf_block *block = state->last_block;
457 struct jcf_relocation *reloc;
458 int code_length = BUFFER_LENGTH (&state->bytecode);
459 int pc = state->code_length;
460 append_chunk_copy (state->bytecode.data, code_length, state);
461 BUFFER_RESET (&state->bytecode);
462 block->v.chunk = state->chunk;
463
464 /* Calculate code_length to the maximum value it can have. */
465 pc += block->v.chunk->size;
466 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
467 {
468 int kind = reloc->kind;
469 if (kind == SWITCH_ALIGN_RELOC)
470 pc += 3;
471 else if (kind > BLOCK_START_RELOC)
472 pc += 2; /* 2-byte offset may grow to 4-byte offset */
473 else if (kind < -1)
474 pc += 5; /* May need to add a goto_w. */
475 }
476 state->code_length = pc;
477 }
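/* For instance, a block ending in a 3-byte goto (kind OPCODE_goto_w,
   which is > BLOCK_START_RELOC) is budgeted 2 extra bytes here in case
   its 16-bit offset later has to grow into a goto_w; the estimate is
   tightened to the real pc by perform_relocations.  */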
478
479 static void
480 define_jcf_label (label, state)
481 struct jcf_block *label;
482 struct jcf_partial *state;
483 {
484 if (state->last_block != NULL)
485 finish_jcf_block (state);
486 label->pc = state->code_length;
487 if (state->blocks == NULL)
488 state->blocks = label;
489 else
490 state->last_block->next = label;
491 state->last_block = label;
492 label->next = NULL;
493 label->u.relocations = NULL;
494 }
495
496 static struct jcf_block *
497 get_jcf_label_here (state)
498 struct jcf_partial *state;
499 {
500 if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
501 return state->last_block;
502 else
503 {
504 struct jcf_block *label = gen_jcf_label (state);
505 define_jcf_label (label, state);
506 return label;
507 }
508 }
509
510 /* Note a line number entry for the current PC and given LINE. */
511
512 static void
513 put_linenumber (line, state)
514 int line;
515 struct jcf_partial *state;
516 {
517 struct jcf_block *label = get_jcf_label_here (state);
518 if (label->linenumber > 0)
519 {
520 label = gen_jcf_label (state);
521 define_jcf_label (label, state);
522 }
523 label->linenumber = line;
524 state->linenumber_count++;
525 }
526
527 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
528 in the range (START_LABEL, END_LABEL). */
529
530 static struct jcf_handler *
531 alloc_handler (start_label, end_label, state)
532 struct jcf_block *start_label;
533 struct jcf_block *end_label;
534 struct jcf_partial *state;
535 {
536 struct jcf_handler *handler = (struct jcf_handler *)
537 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
538 handler->start_label = start_label;
539 handler->end_label = end_label;
540 handler->handler_label = get_jcf_label_here (state);
541 if (state->handlers == NULL)
542 state->handlers = handler;
543 else
544 state->last_handler->next = handler;
545 state->last_handler = handler;
546 handler->next = NULL;
547 state->num_handlers++;
548 return handler;
549 }
550
551 \f
552 /* The index of jvm local variable allocated for this DECL.
553 This is assigned when generating .class files;
554 contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
555 (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
556
557 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
558
559 struct localvar_info
560 {
561 struct localvar_info *next;
562
563 tree decl;
564 struct jcf_block *start_label;
565 struct jcf_block *end_label;
566 };
567
568 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
569 #define localvar_max \
570 ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
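/* Sketch of the slot buffer: after allocating an int and then a long,
   localvar_buffer[] holds { info-for-int, info-for-long, ~0 }; the ~0
   entry marks the second word of the wide local, so that slot is not
   handed out again until localvar_free clears it.  */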
571
572 static void
573 localvar_alloc (decl, state)
574 tree decl;
575 struct jcf_partial *state;
576 {
577 struct jcf_block *start_label = get_jcf_label_here (state);
578 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
579 int index;
580 register struct localvar_info *info;
581 register struct localvar_info **ptr = localvar_buffer;
582 register struct localvar_info **limit
583 = (struct localvar_info**) state->localvars.ptr;
584 for (index = 0; ptr < limit; index++, ptr++)
585 {
586 if (ptr[0] == NULL
587 && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
588 break;
589 }
590 if (ptr == limit)
591 {
592 buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
593 ptr = (struct localvar_info**) state->localvars.data + index;
594 state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
595 }
596 info = (struct localvar_info *)
597 obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
598 ptr[0] = info;
599 if (wide)
600 ptr[1] = (struct localvar_info *)(~0);
601 DECL_LOCAL_INDEX (decl) = index;
602 info->decl = decl;
603 info->start_label = start_label;
604
605 if (debug_info_level > DINFO_LEVEL_TERSE
606 && DECL_NAME (decl) != NULL_TREE)
607 {
608 /* Generate debugging info. */
609 info->next = NULL;
610 if (state->last_lvar != NULL)
611 state->last_lvar->next = info;
612 else
613 state->first_lvar = info;
614 state->last_lvar = info;
615 state->lvar_count++;
616 }
617 }
618
619 static void
620 localvar_free (decl, state)
621 tree decl;
622 struct jcf_partial *state;
623 {
624 struct jcf_block *end_label = get_jcf_label_here (state);
625 int index = DECL_LOCAL_INDEX (decl);
626 register struct localvar_info **ptr = &localvar_buffer [index];
627 register struct localvar_info *info = *ptr;
628 int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
629
630 info->end_label = end_label;
631
632 if (info->decl != decl)
633 abort ();
634 ptr[0] = NULL;
635 if (wide)
636 {
637 if (ptr[1] != (struct localvar_info *)(~0))
638 abort ();
639 ptr[1] = NULL;
640 }
641 }
642
643 \f
644 #define STACK_TARGET 1
645 #define IGNORE_TARGET 2
646
647 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
648 a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
649
650 static int
651 get_access_flags (decl)
652 tree decl;
653 {
654 int flags = 0;
655 int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
656 if (CLASS_PUBLIC (decl)) /* same as FIELD_PUBLIC and METHOD_PUBLIC */
657 flags |= ACC_PUBLIC;
658 if (CLASS_FINAL (decl)) /* same as FIELD_FINAL and METHOD_FINAL */
659 flags |= ACC_FINAL;
660 if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
661 {
662 if (TREE_PROTECTED (decl))
663 flags |= ACC_PROTECTED;
664 if (TREE_PRIVATE (decl))
665 flags |= ACC_PRIVATE;
666 }
667 else if (TREE_CODE (decl) == TYPE_DECL)
668 {
669 if (CLASS_SUPER (decl))
670 flags |= ACC_SUPER;
671 if (CLASS_ABSTRACT (decl))
672 flags |= ACC_ABSTRACT;
673 if (CLASS_INTERFACE (decl))
674 flags |= ACC_INTERFACE;
675 if (CLASS_STATIC (decl))
676 flags |= ACC_STATIC;
677 if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
678 || LOCAL_CLASS_P (TREE_TYPE (decl)))
679 flags |= ACC_PRIVATE;
680 }
681 else
682 fatal ("internal error - bad argument to get_access_flags");
683 if (TREE_CODE (decl) == FUNCTION_DECL)
684 {
685 if (METHOD_NATIVE (decl))
686 flags |= ACC_NATIVE;
687 if (METHOD_STATIC (decl))
688 flags |= ACC_STATIC;
689 if (METHOD_SYNCHRONIZED (decl))
690 flags |= ACC_SYNCHRONIZED;
691 if (METHOD_ABSTRACT (decl))
692 flags |= ACC_ABSTRACT;
693 }
694 if (isfield)
695 {
696 if (FIELD_STATIC (decl))
697 flags |= ACC_STATIC;
698 if (FIELD_VOLATILE (decl))
699 flags |= ACC_VOLATILE;
700 if (FIELD_TRANSIENT (decl))
701 flags |= ACC_TRANSIENT;
702 }
703 return flags;
704 }
705
706 /* Write the list of segments starting at CHUNKS to STREAM. */
707
708 static void
709 write_chunks (stream, chunks)
710 FILE* stream;
711 struct chunk *chunks;
712 {
713 for (; chunks != NULL; chunks = chunks->next)
714 fwrite (chunks->data, chunks->size, 1, stream);
715 }
716
717 /* Push a 1-word constant in the constant pool at the given INDEX.
718 (Caller is responsible for doing NOTE_PUSH.) */
719
720 static void
721 push_constant1 (index, state)
722 HOST_WIDE_INT index;
723 struct jcf_partial *state;
724 {
725 RESERVE (3);
726 if (index < 256)
727 {
728 OP1 (OPCODE_ldc);
729 OP1 (index);
730 }
731 else
732 {
733 OP1 (OPCODE_ldc_w);
734 OP2 (index);
735 }
736 }
737
738 /* Push a 2-word constant in the constant pool at the given INDEX.
739 (Caller is responsible for doing NOTE_PUSH.) */
740
741 static void
742 push_constant2 (index, state)
743 HOST_WIDE_INT index;
744 struct jcf_partial *state;
745 {
746 RESERVE (3);
747 OP1 (OPCODE_ldc2_w);
748 OP2 (index);
749 }
750
751 /* Push 32-bit integer constant on VM stack.
752 Caller is responsible for doing NOTE_PUSH. */
753
754 static void
755 push_int_const (i, state)
756 HOST_WIDE_INT i;
757 struct jcf_partial *state;
758 {
759 RESERVE(3);
760 if (i >= -1 && i <= 5)
761 OP1(OPCODE_iconst_0 + i);
762 else if (i >= -128 && i < 128)
763 {
764 OP1(OPCODE_bipush);
765 OP1(i);
766 }
767 else if (i >= -32768 && i < 32768)
768 {
769 OP1(OPCODE_sipush);
770 OP2(i);
771 }
772 else
773 {
774 i = find_constant1 (&state->cpool, CONSTANT_Integer,
775 (jword)(i & 0xFFFFFFFF));
776 push_constant1 (i, state);
777 }
778 }
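/* So, e.g., push_int_const (3, state) emits iconst_3, 100 becomes
   "bipush 100", 1000 becomes "sipush 1000", and anything outside the
   16-bit range falls back to ldc/ldc_w of a CONSTANT_Integer pool entry.  */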
779
780 static int
781 find_constant_wide (lo, hi, state)
782 HOST_WIDE_INT lo, hi;
783 struct jcf_partial *state;
784 {
785 HOST_WIDE_INT w1, w2;
786 lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
787 return find_constant2 (&state->cpool, CONSTANT_Long,
788 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
789 }
790
791 /* Find or allocate a constant pool entry for the given VALUE.
792 Return the index in the constant pool. */
793
794 static int
795 find_constant_index (value, state)
796 tree value;
797 struct jcf_partial *state;
798 {
799 if (TREE_CODE (value) == INTEGER_CST)
800 {
801 if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
802 return find_constant1 (&state->cpool, CONSTANT_Integer,
803 (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
804 else
805 return find_constant_wide (TREE_INT_CST_LOW (value),
806 TREE_INT_CST_HIGH (value), state);
807 }
808 else if (TREE_CODE (value) == REAL_CST)
809 {
810 long words[2];
811 if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
812 {
813 words[0] = etarsingle (TREE_REAL_CST (value)) & 0xFFFFFFFF;
814 return find_constant1 (&state->cpool, CONSTANT_Float,
815 (jword)words[0]);
816 }
817 else
818 {
819 etardouble (TREE_REAL_CST (value), words);
820 return find_constant2 (&state->cpool, CONSTANT_Double,
821 (jword)(words[1-FLOAT_WORDS_BIG_ENDIAN] &
822 0xFFFFFFFF),
823 (jword)(words[FLOAT_WORDS_BIG_ENDIAN] &
824 0xFFFFFFFF));
825 }
826 }
827 else if (TREE_CODE (value) == STRING_CST)
828 {
829 return find_string_constant (&state->cpool, value);
830 }
831 else
832 fatal ("find_constant_index - bad type");
833 }
834
835 /* Push 64-bit long constant on VM stack.
836 Caller is responsible for doing NOTE_PUSH. */
837
838 static void
839 push_long_const (lo, hi, state)
840 HOST_WIDE_INT lo, hi;
841 struct jcf_partial *state;
842 {
843 if (hi == 0 && lo >= 0 && lo <= 1)
844 {
845 RESERVE(1);
846 OP1(OPCODE_lconst_0 + lo);
847 }
848 else if ((hi == 0 && (jword)(lo & 0xFFFFFFFF) < 32768)
849 || (hi == -1 && (jword)(lo & 0xFFFFFFFF) >= -32768))
850 {
851 push_int_const (lo, state);
852 RESERVE (1);
853 OP1 (OPCODE_i2l);
854 }
855 else
856 push_constant2 (find_constant_wide (lo, hi, state), state);
857 }
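/* Likewise push_long_const (1, 0, state) emits lconst_1, a value such as
   1000L comes out as "sipush 1000; i2l" (cheaper than a 2-word constant
   pool entry), and anything else becomes ldc2_w of a CONSTANT_Long.  */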
858
859 static void
860 field_op (field, opcode, state)
861 tree field;
862 int opcode;
863 struct jcf_partial *state;
864 {
865 int index = find_fieldref_index (&state->cpool, field);
866 RESERVE (3);
867 OP1 (opcode);
868 OP2 (index);
869 }
870
871 /* Returns an integer in the range 0 (for 'int') through 4 (for object
872 reference) to 7 (for 'short') which matches the pattern of how JVM
873 opcodes typically depend on the operand type. */
874
875 static int
876 adjust_typed_op (type, max)
877 tree type;
878 int max;
879 {
880 switch (TREE_CODE (type))
881 {
882 case POINTER_TYPE:
883 case RECORD_TYPE: return 4;
884 case BOOLEAN_TYPE:
885 return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
886 case CHAR_TYPE:
887 return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
888 case INTEGER_TYPE:
889 switch (TYPE_PRECISION (type))
890 {
891 case 8: return max < 5 ? 0 : 5;
892 case 16: return max < 7 ? 0 : 7;
893 case 32: return 0;
894 case 64: return 1;
895 }
896 break;
897 case REAL_TYPE:
898 switch (TYPE_PRECISION (type))
899 {
900 case 32: return 2;
901 case 64: return 3;
902 }
903 break;
904 default:
905 break;
906 }
907 abort ();
908 }
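/* The resulting mapping is: int-like 0, long 1, float 2, double 3,
   reference 4, and (when MAX allows) byte/boolean 5, char 6, short 7.
   E.g. OPCODE_iaload + adjust_typed_op (type, 7) selects one of
   iaload/laload/faload/daload/aaload/baload/caload/saload, while
   OPCODE_iadd + adjust_typed_op (type, 3) picks iadd/ladd/fadd/dadd.  */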
909
910 static void
911 maybe_wide (opcode, index, state)
912 int opcode, index;
913 struct jcf_partial *state;
914 {
915 if (index >= 256)
916 {
917 RESERVE (4);
918 OP1 (OPCODE_wide);
919 OP1 (opcode);
920 OP2 (index);
921 }
922 else
923 {
924 RESERVE (2);
925 OP1 (opcode);
926 OP1 (index);
927 }
928 }
929
930 /* Compile code to duplicate with offset, where
931 SIZE is the size of the stack item to duplicate (1 or 2), and
932 OFFSET is where to insert the result (must be 0, 1, or 2).
933 (The new words get inserted at stack[SP-size-offset].) */
934
935 static void
936 emit_dup (size, offset, state)
937 int size, offset;
938 struct jcf_partial *state;
939 {
940 int kind;
941 if (size == 0)
942 return;
943 RESERVE(1);
944 if (offset == 0)
945 kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
946 else if (offset == 1)
947 kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
948 else if (offset == 2)
949 kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
950 else
951 abort();
952 OP1 (kind);
953 NOTE_PUSH (size);
954 }
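/* E.g. emit_dup (1, 1, state) emits dup_x1, turning ..., a, b into
   ..., b, a, b; emit_dup (2, 0, state) emits dup2, which is how the
   array-and-index pair is copied for the increment cases below.  */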
955
956 static void
957 emit_pop (size, state)
958 int size;
959 struct jcf_partial *state;
960 {
961 RESERVE (1);
962 OP1 (OPCODE_pop - 1 + size);
963 }
964
965 static void
966 emit_iinc (var, value, state)
967 tree var;
968 HOST_WIDE_INT value;
969 struct jcf_partial *state;
970 {
971 int slot = DECL_LOCAL_INDEX (var);
972
973 if (value < -128 || value > 127 || slot >= 256)
974 {
975 RESERVE (6);
976 OP1 (OPCODE_wide);
977 OP1 (OPCODE_iinc);
978 OP2 (slot);
979 OP2 (value);
980 }
981 else
982 {
983 RESERVE (3);
984 OP1 (OPCODE_iinc);
985 OP1 (slot);
986 OP1 (value);
987 }
988 }
989
990 static void
991 emit_load_or_store (var, opcode, state)
992 tree var; /* Variable to load from or store into. */
993 int opcode; /* Either OPCODE_iload or OPCODE_istore. */
994 struct jcf_partial *state;
995 {
996 tree type = TREE_TYPE (var);
997 int kind = adjust_typed_op (type, 4);
998 int index = DECL_LOCAL_INDEX (var);
999 if (index <= 3)
1000 {
1001 RESERVE (1);
1002 OP1 (opcode + 5 + 4 * kind + index); /* [ilfda]{load,store}_[0123] */
1003 }
1004 else
1005 maybe_wide (opcode + kind, index, state); /* [ilfda]{load,store} */
1006 }
1007
1008 static void
1009 emit_load (var, state)
1010 tree var;
1011 struct jcf_partial *state;
1012 {
1013 emit_load_or_store (var, OPCODE_iload, state);
1014 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1015 }
1016
1017 static void
1018 emit_store (var, state)
1019 tree var;
1020 struct jcf_partial *state;
1021 {
1022 emit_load_or_store (var, OPCODE_istore, state);
1023 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1024 }
1025
1026 static void
1027 emit_unop (opcode, type, state)
1028 enum java_opcode opcode;
1029 tree type ATTRIBUTE_UNUSED;
1030 struct jcf_partial *state;
1031 {
1032 RESERVE(1);
1033 OP1 (opcode);
1034 }
1035
1036 static void
1037 emit_binop (opcode, type, state)
1038 enum java_opcode opcode;
1039 tree type;
1040 struct jcf_partial *state;
1041 {
1042 int size = TYPE_IS_WIDE (type) ? 2 : 1;
1043 RESERVE(1);
1044 OP1 (opcode);
1045 NOTE_POP (size);
1046 }
1047
1048 static void
1049 emit_reloc (value, kind, target, state)
1050 HOST_WIDE_INT value;
1051 int kind;
1052 struct jcf_block *target;
1053 struct jcf_partial *state;
1054 {
1055 struct jcf_relocation *reloc = (struct jcf_relocation *)
1056 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1057 struct jcf_block *block = state->last_block;
1058 reloc->next = block->u.relocations;
1059 block->u.relocations = reloc;
1060 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1061 reloc->label = target;
1062 reloc->kind = kind;
1063 if (kind == 0 || kind == BLOCK_START_RELOC)
1064 OP4 (value);
1065 else if (kind != SWITCH_ALIGN_RELOC)
1066 OP2 (value);
1067 }
1068
1069 static void
1070 emit_switch_reloc (label, state)
1071 struct jcf_block *label;
1072 struct jcf_partial *state;
1073 {
1074 emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1075 }
1076
1077 /* Similar to emit_switch_reloc,
1078 but re-uses an existing case reloc. */
1079
1080 static void
1081 emit_case_reloc (reloc, state)
1082 struct jcf_relocation *reloc;
1083 struct jcf_partial *state;
1084 {
1085 struct jcf_block *block = state->last_block;
1086 reloc->next = block->u.relocations;
1087 block->u.relocations = reloc;
1088 reloc->offset = BUFFER_LENGTH (&state->bytecode);
1089 reloc->kind = BLOCK_START_RELOC;
1090 OP4 (0);
1091 }
1092
1093 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset
1094 The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1095
1096 static void
1097 emit_if (target, opcode, inv_opcode, state)
1098 struct jcf_block *target;
1099 int opcode, inv_opcode;
1100 struct jcf_partial *state;
1101 {
1102 RESERVE(3);
1103 OP1 (opcode);
1104 /* value is 1 byte from reloc back to start of instruction. */
1105 emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1106 }
1107
1108 static void
1109 emit_goto (target, state)
1110 struct jcf_block *target;
1111 struct jcf_partial *state;
1112 {
1113 RESERVE(3);
1114 OP1 (OPCODE_goto);
1115 /* Value is 1 byte from reloc back to start of instruction. */
1116 emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1117 }
1118
1119 static void
1120 emit_jsr (target, state)
1121 struct jcf_block *target;
1122 struct jcf_partial *state;
1123 {
1124 RESERVE(3);
1125 OP1 (OPCODE_jsr);
1126 /* Value is 1 byte from reloc back to start of instruction. */
1127 emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1128 }
1129
1130 /* Generate code to evaluate EXP. If the result is true,
1131 branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1132 TRUE_BRANCH_FIRST is a code generation hint that the
1133 TRUE_LABEL may follow right after this. (The idea is that we
1134 may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1135
1136 static void
1137 generate_bytecode_conditional (exp, true_label, false_label,
1138 true_branch_first, state)
1139 tree exp;
1140 struct jcf_block *true_label;
1141 struct jcf_block *false_label;
1142 int true_branch_first;
1143 struct jcf_partial *state;
1144 {
1145 tree exp0, exp1, type;
1146 int save_SP = state->code_SP;
1147 enum java_opcode op, negop;
1148 switch (TREE_CODE (exp))
1149 {
1150 case INTEGER_CST:
1151 emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1152 break;
1153 case COND_EXPR:
1154 {
1155 struct jcf_block *then_label = gen_jcf_label (state);
1156 struct jcf_block *else_label = gen_jcf_label (state);
1157 int save_SP_before, save_SP_after;
1158 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1159 then_label, else_label, 1, state);
1160 define_jcf_label (then_label, state);
1161 save_SP_before = state->code_SP;
1162 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1163 true_label, false_label, 1, state);
1164 save_SP_after = state->code_SP;
1165 state->code_SP = save_SP_before;
1166 define_jcf_label (else_label, state);
1167 generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1168 true_label, false_label,
1169 true_branch_first, state);
1170 if (state->code_SP != save_SP_after)
1171 fatal ("internal error non-matching SP");
1172 }
1173 break;
1174 case TRUTH_NOT_EXPR:
1175 generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label, true_label,
1176 ! true_branch_first, state);
1177 break;
1178 case TRUTH_ANDIF_EXPR:
1179 {
1180 struct jcf_block *next_label = gen_jcf_label (state);
1181 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1182 next_label, false_label, 1, state);
1183 define_jcf_label (next_label, state);
1184 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1185 true_label, false_label, 1, state);
1186 }
1187 break;
1188 case TRUTH_ORIF_EXPR:
1189 {
1190 struct jcf_block *next_label = gen_jcf_label (state);
1191 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1192 true_label, next_label, 1, state);
1193 define_jcf_label (next_label, state);
1194 generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1195 true_label, false_label, 1, state);
1196 }
1197 break;
1198 compare_1:
1199 /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1200 set it to the corresponding 1-operand if<COND> instruction. */
1201 op = op - 6;
1202 /* FALLTHROUGH */
1203 compare_2:
1204 /* The opcodes with their inverses are allocated in pairs.
1205 E.g. The inverse of if_icmplt (161) is if_icmpge (162). */
1206 negop = (op & 1) ? op + 1 : op - 1;
1207 compare_2_ptr:
1208 if (true_branch_first)
1209 {
1210 emit_if (false_label, negop, op, state);
1211 emit_goto (true_label, state);
1212 }
1213 else
1214 {
1215 emit_if (true_label, op, negop, state);
1216 emit_goto (false_label, state);
1217 }
1218 break;
1219 case EQ_EXPR:
1220 op = OPCODE_if_icmpeq;
1221 goto compare;
1222 case NE_EXPR:
1223 op = OPCODE_if_icmpne;
1224 goto compare;
1225 case GT_EXPR:
1226 op = OPCODE_if_icmpgt;
1227 goto compare;
1228 case LT_EXPR:
1229 op = OPCODE_if_icmplt;
1230 goto compare;
1231 case GE_EXPR:
1232 op = OPCODE_if_icmpge;
1233 goto compare;
1234 case LE_EXPR:
1235 op = OPCODE_if_icmple;
1236 goto compare;
1237 compare:
1238 exp0 = TREE_OPERAND (exp, 0);
1239 exp1 = TREE_OPERAND (exp, 1);
1240 type = TREE_TYPE (exp0);
1241 switch (TREE_CODE (type))
1242 {
1243 int opf;
1244 case POINTER_TYPE: case RECORD_TYPE:
1245 switch (TREE_CODE (exp))
1246 {
1247 case EQ_EXPR: op = OPCODE_if_acmpeq; break;
1248 case NE_EXPR: op = OPCODE_if_acmpne; break;
1249 default: abort();
1250 }
1251 if (integer_zerop (exp1) || integer_zerop (exp0))
1252 {
1253 generate_bytecode_insns (integer_zerop (exp1) ? exp0 : exp1,
1254 STACK_TARGET, state);
1255 op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1256 negop = (op & 1) ? op - 1 : op + 1;
1257 NOTE_POP (1);
1258 goto compare_2_ptr;
1259 }
1260 generate_bytecode_insns (exp0, STACK_TARGET, state);
1261 generate_bytecode_insns (exp1, STACK_TARGET, state);
1262 NOTE_POP (2);
1263 goto compare_2;
1264 case REAL_TYPE:
1265 generate_bytecode_insns (exp0, STACK_TARGET, state);
1266 generate_bytecode_insns (exp1, STACK_TARGET, state);
1267 if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1268 opf = OPCODE_fcmpg;
1269 else
1270 opf = OPCODE_fcmpl;
1271 if (TYPE_PRECISION (type) > 32)
1272 {
1273 opf += 2;
1274 NOTE_POP (4);
1275 }
1276 else
1277 NOTE_POP (2);
1278 RESERVE (1);
1279 OP1 (opf);
1280 goto compare_1;
1281 case INTEGER_TYPE:
1282 if (TYPE_PRECISION (type) > 32)
1283 {
1284 generate_bytecode_insns (exp0, STACK_TARGET, state);
1285 generate_bytecode_insns (exp1, STACK_TARGET, state);
1286 NOTE_POP (4);
1287 RESERVE (1);
1288 OP1 (OPCODE_lcmp);
1289 goto compare_1;
1290 }
1291 /* FALLTHROUGH */
1292 default:
1293 if (integer_zerop (exp1))
1294 {
1295 generate_bytecode_insns (exp0, STACK_TARGET, state);
1296 NOTE_POP (1);
1297 goto compare_1;
1298 }
1299 if (integer_zerop (exp0))
1300 {
1301 switch (op)
1302 {
1303 case OPCODE_if_icmplt:
1304 case OPCODE_if_icmpge:
1305 op += 2;
1306 break;
1307 case OPCODE_if_icmpgt:
1308 case OPCODE_if_icmple:
1309 op -= 2;
1310 break;
1311 default:
1312 break;
1313 }
1314 generate_bytecode_insns (exp1, STACK_TARGET, state);
1315 NOTE_POP (1);
1316 goto compare_1;
1317 }
1318 generate_bytecode_insns (exp0, STACK_TARGET, state);
1319 generate_bytecode_insns (exp1, STACK_TARGET, state);
1320 NOTE_POP (2);
1321 goto compare_2;
1322 }
1323
1324 default:
1325 generate_bytecode_insns (exp, STACK_TARGET, state);
1326 NOTE_POP (1);
1327 if (true_branch_first)
1328 {
1329 emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1330 emit_goto (true_label, state);
1331 }
1332 else
1333 {
1334 emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1335 emit_goto (false_label, state);
1336 }
1337 break;
1338 }
1339 if (save_SP != state->code_SP)
1340 fatal ("internal error - SP mismatch");
1341 }
1342
1343 /* Call pending cleanups, i.e. those for surrounding CLEANUP_POINT_EXPRs,
1344 but only as far out as LIMIT (since we are about to jump to the
1345 label that is LIMIT). */
1346
1347 static void
1348 call_cleanups (limit, state)
1349 struct jcf_block *limit;
1350 struct jcf_partial *state;
1351 {
1352 struct jcf_block *block = state->labeled_blocks;
1353 for (; block != limit; block = block->next)
1354 {
1355 if (block->pc == PENDING_CLEANUP_PC)
1356 emit_jsr (block, state);
1357 }
1358 }
1359
1360 static void
1361 generate_bytecode_return (exp, state)
1362 tree exp;
1363 struct jcf_partial *state;
1364 {
1365 tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1366 int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1367 int op;
1368 again:
1369 if (exp != NULL)
1370 {
1371 switch (TREE_CODE (exp))
1372 {
1373 case COMPOUND_EXPR:
1374 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1375 state);
1376 exp = TREE_OPERAND (exp, 1);
1377 goto again;
1378 case COND_EXPR:
1379 {
1380 struct jcf_block *then_label = gen_jcf_label (state);
1381 struct jcf_block *else_label = gen_jcf_label (state);
1382 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1383 then_label, else_label, 1, state);
1384 define_jcf_label (then_label, state);
1385 generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1386 define_jcf_label (else_label, state);
1387 generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1388 }
1389 return;
1390 default:
1391 generate_bytecode_insns (exp,
1392 returns_void ? IGNORE_TARGET
1393 : STACK_TARGET, state);
1394 }
1395 }
1396 if (returns_void)
1397 {
1398 op = OPCODE_return;
1399 call_cleanups (NULL_PTR, state);
1400 }
1401 else
1402 {
1403 op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1404 if (state->num_finalizers > 0)
1405 {
1406 if (state->return_value_decl == NULL_TREE)
1407 {
1408 state->return_value_decl
1409 = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1410 localvar_alloc (state->return_value_decl, state);
1411 }
1412 emit_store (state->return_value_decl, state);
1413 call_cleanups (NULL_PTR, state);
1414 emit_load (state->return_value_decl, state);
1415 /* If we call localvar_free (state->return_value_decl, state),
1416 then we risk the saved decl being erroneously re-used in the
1417 finalizer. Instead, we keep the state->return_value_decl
1418 allocated through the rest of the method. This is not
1419 the greatest solution, but it is at least simple and safe. */
1420 }
1421 }
1422 RESERVE (1);
1423 OP1 (op);
1424 }
1425
1426 /* Generate bytecode for sub-expression EXP of METHOD.
1427 TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1428
1429 static void
1430 generate_bytecode_insns (exp, target, state)
1431 tree exp;
1432 int target;
1433 struct jcf_partial *state;
1434 {
1435 tree type;
1436 enum java_opcode jopcode;
1437 int op;
1438 HOST_WIDE_INT value;
1439 int post_op;
1440 int size;
1441 int offset;
1442
1443 if (exp == NULL && target == IGNORE_TARGET)
1444 return;
1445
1446 type = TREE_TYPE (exp);
1447
1448 switch (TREE_CODE (exp))
1449 {
1450 case BLOCK:
1451 if (BLOCK_EXPR_BODY (exp))
1452 {
1453 tree local;
1454 tree body = BLOCK_EXPR_BODY (exp);
1455 for (local = BLOCK_EXPR_DECLS (exp); local; )
1456 {
1457 tree next = TREE_CHAIN (local);
1458 localvar_alloc (local, state);
1459 local = next;
1460 }
1461 /* Avoid deep recursion for long blocks. */
1462 while (TREE_CODE (body) == COMPOUND_EXPR)
1463 {
1464 generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1465 body = TREE_OPERAND (body, 1);
1466 }
1467 generate_bytecode_insns (body, target, state);
1468 for (local = BLOCK_EXPR_DECLS (exp); local; )
1469 {
1470 tree next = TREE_CHAIN (local);
1471 localvar_free (local, state);
1472 local = next;
1473 }
1474 }
1475 break;
1476 case COMPOUND_EXPR:
1477 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1478 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1479 break;
1480 case EXPR_WITH_FILE_LOCATION:
1481 {
1482 const char *saved_input_filename = input_filename;
1483 tree body = EXPR_WFL_NODE (exp);
1484 int saved_lineno = lineno;
1485 if (body == empty_stmt_node)
1486 break;
1487 input_filename = EXPR_WFL_FILENAME (exp);
1488 lineno = EXPR_WFL_LINENO (exp);
1489 if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1490 && debug_info_level > DINFO_LEVEL_NONE)
1491 put_linenumber (lineno, state);
1492 generate_bytecode_insns (body, target, state);
1493 input_filename = saved_input_filename;
1494 lineno = saved_lineno;
1495 }
1496 break;
1497 case INTEGER_CST:
1498 if (target == IGNORE_TARGET) ; /* do nothing */
1499 else if (TREE_CODE (type) == POINTER_TYPE)
1500 {
1501 if (! integer_zerop (exp))
1502 abort();
1503 RESERVE(1);
1504 OP1 (OPCODE_aconst_null);
1505 NOTE_PUSH (1);
1506 }
1507 else if (TYPE_PRECISION (type) <= 32)
1508 {
1509 push_int_const (TREE_INT_CST_LOW (exp), state);
1510 NOTE_PUSH (1);
1511 }
1512 else
1513 {
1514 push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1515 state);
1516 NOTE_PUSH (2);
1517 }
1518 break;
1519 case REAL_CST:
1520 {
1521 int prec = TYPE_PRECISION (type) >> 5;
1522 RESERVE(1);
1523 if (real_zerop (exp))
1524 OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1525 else if (real_onep (exp))
1526 OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1527 /* FIXME Should also use fconst_2 for 2.0f.
1528 Also, should use iconst_2/ldc followed by i2f/i2d
1529 for other float/double when the value is a small integer. */
1530 else
1531 {
1532 offset = find_constant_index (exp, state);
1533 if (prec == 1)
1534 push_constant1 (offset, state);
1535 else
1536 push_constant2 (offset, state);
1537 }
1538 NOTE_PUSH (prec);
1539 }
1540 break;
1541 case STRING_CST:
1542 push_constant1 (find_string_constant (&state->cpool, exp), state);
1543 NOTE_PUSH (1);
1544 break;
1545 case VAR_DECL:
1546 if (TREE_STATIC (exp))
1547 {
1548 field_op (exp, OPCODE_getstatic, state);
1549 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1550 break;
1551 }
1552 /* ... fall through ... */
1553 case PARM_DECL:
1554 emit_load (exp, state);
1555 break;
1556 case NON_LVALUE_EXPR:
1557 case INDIRECT_REF:
1558 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1559 break;
1560 case ARRAY_REF:
1561 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1562 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1563 if (target != IGNORE_TARGET)
1564 {
1565 jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1566 RESERVE(1);
1567 OP1 (jopcode);
1568 if (! TYPE_IS_WIDE (type))
1569 NOTE_POP (1);
1570 }
1571 break;
1572 case COMPONENT_REF:
1573 {
1574 tree obj = TREE_OPERAND (exp, 0);
1575 tree field = TREE_OPERAND (exp, 1);
1576 int is_static = FIELD_STATIC (field);
1577 generate_bytecode_insns (obj,
1578 is_static ? IGNORE_TARGET : target, state);
1579 if (target != IGNORE_TARGET)
1580 {
1581 if (DECL_NAME (field) == length_identifier_node && !is_static
1582 && TYPE_ARRAY_P (TREE_TYPE (obj)))
1583 {
1584 RESERVE (1);
1585 OP1 (OPCODE_arraylength);
1586 }
1587 else
1588 {
1589 field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1590 state);
1591 if (! is_static)
1592 NOTE_POP (1);
1593 NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1594 }
1595 }
1596 }
1597 break;
1598 case TRUTH_ANDIF_EXPR:
1599 case TRUTH_ORIF_EXPR:
1600 case EQ_EXPR:
1601 case NE_EXPR:
1602 case GT_EXPR:
1603 case LT_EXPR:
1604 case GE_EXPR:
1605 case LE_EXPR:
1606 {
1607 struct jcf_block *then_label = gen_jcf_label (state);
1608 struct jcf_block *else_label = gen_jcf_label (state);
1609 struct jcf_block *end_label = gen_jcf_label (state);
1610 generate_bytecode_conditional (exp,
1611 then_label, else_label, 1, state);
1612 define_jcf_label (then_label, state);
1613 push_int_const (1, state);
1614 emit_goto (end_label, state);
1615 define_jcf_label (else_label, state);
1616 push_int_const (0, state);
1617 define_jcf_label (end_label, state);
1618 NOTE_PUSH (1);
1619 }
1620 break;
1621 case COND_EXPR:
1622 {
1623 struct jcf_block *then_label = gen_jcf_label (state);
1624 struct jcf_block *else_label = gen_jcf_label (state);
1625 struct jcf_block *end_label = gen_jcf_label (state);
1626 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1627 then_label, else_label, 1, state);
1628 define_jcf_label (then_label, state);
1629 generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1630 if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1631 /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1632 || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1633 emit_goto (end_label, state);
1634 define_jcf_label (else_label, state);
1635 generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1636 define_jcf_label (end_label, state);
1637 /* COND_EXPR can be used in a binop. The stack must be adjusted. */
1638 if (TREE_TYPE (exp) != void_type_node)
1639 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1640 }
1641 break;
1642 case CASE_EXPR:
1643 {
1644 struct jcf_switch_state *sw_state = state->sw_state;
1645 struct jcf_relocation *reloc = (struct jcf_relocation *)
1646 obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1647 HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1648 reloc->kind = 0;
1649 reloc->label = get_jcf_label_here (state);
1650 reloc->offset = case_value;
1651 reloc->next = sw_state->cases;
1652 sw_state->cases = reloc;
1653 if (sw_state->num_cases == 0)
1654 {
1655 sw_state->min_case = case_value;
1656 sw_state->max_case = case_value;
1657 }
1658 else
1659 {
1660 if (case_value < sw_state->min_case)
1661 sw_state->min_case = case_value;
1662 if (case_value > sw_state->max_case)
1663 sw_state->max_case = case_value;
1664 }
1665 sw_state->num_cases++;
1666 }
1667 break;
1668 case DEFAULT_EXPR:
1669 state->sw_state->default_label = get_jcf_label_here (state);
1670 break;
1671
1672 case SWITCH_EXPR:
1673 {
1674 /* The SWITCH_EXPR has three parts, generated in the following order:
1675 1. the switch_expression (the value used to select the correct case);
1676 2. the switch_body;
1677 3. the switch_instruction (the tableswitch/lookupswitch instruction).
1678 After code generation, we will re-order them in the order 1, 3, 2.
1679 This is to avoid extra GOTOs. */
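      /* Roughly: the chunks are generated as [expr][body][switch] and the
         block chain is then re-linked below as [expr][switch][body], with
         the body's pcs shifted up by switch_length, so falling out of the
         switch_expression lands directly on the tableswitch/lookupswitch.  */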
1680 struct jcf_switch_state sw_state;
1681 struct jcf_block *expression_last; /* Last block of the switch_expression. */
1682 struct jcf_block *body_last; /* Last block of the switch_body. */
1683 struct jcf_block *switch_instruction; /* First block of switch_instruction. */
1684 struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1685 struct jcf_block *body_block;
1686 int switch_length;
1687 sw_state.prev = state->sw_state;
1688 state->sw_state = &sw_state;
1689 sw_state.cases = NULL;
1690 sw_state.num_cases = 0;
1691 sw_state.default_label = NULL;
1692 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1693 expression_last = state->last_block;
1694 body_block = get_jcf_label_here (state); /* Force a new block here. */
1695 generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1696 body_last = state->last_block;
1697
1698 switch_instruction = gen_jcf_label (state);
1699 define_jcf_label (switch_instruction, state);
1700 if (sw_state.default_label == NULL)
1701 sw_state.default_label = gen_jcf_label (state);
1702
1703 if (sw_state.num_cases <= 1)
1704 {
1705 if (sw_state.num_cases == 0)
1706 {
1707 emit_pop (1, state);
1708 NOTE_POP (1);
1709 }
1710 else
1711 {
1712 push_int_const (sw_state.cases->offset, state);
1713 emit_if (sw_state.cases->label,
1714 OPCODE_ifeq, OPCODE_ifne, state);
1715 }
1716 emit_goto (sw_state.default_label, state);
1717 }
1718 else
1719 {
1720 HOST_WIDE_INT i;
1721 /* Copy the chain of relocs into a sorted array. */
1722 struct jcf_relocation **relocs = (struct jcf_relocation **)
1723 xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1724 /* The relocs array is a buffer with a gap.
1725 The assumption is that cases will normally come in "runs". */
1726 int gap_start = 0;
1727 int gap_end = sw_state.num_cases;
1728 struct jcf_relocation *reloc;
1729 for (reloc = sw_state.cases; reloc != NULL; reloc = reloc->next)
1730 {
1731 HOST_WIDE_INT case_value = reloc->offset;
1732 while (gap_end < sw_state.num_cases)
1733 {
1734 struct jcf_relocation *end = relocs[gap_end];
1735 if (case_value <= end->offset)
1736 break;
1737 relocs[gap_start++] = end;
1738 gap_end++;
1739 }
1740 while (gap_start > 0)
1741 {
1742 struct jcf_relocation *before = relocs[gap_start-1];
1743 if (case_value >= before->offset)
1744 break;
1745 relocs[--gap_end] = before;
1746 gap_start--;
1747 }
1748 relocs[gap_start++] = reloc;
1749 /* Note we don't check for duplicates. FIXME! */
1750 }
1751
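          /* Choose tableswitch when the case values cover at least half of
             their range (each unused table slot costs 4 bytes of default
             entries); e.g. cases {1,2,3,7} give 2*4 >= 7-1, so a table is
             used, while {1,100,1000} fall through to lookupswitch.  */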
1752 if (2 * sw_state.num_cases
1753 >= sw_state.max_case - sw_state.min_case)
1754 { /* Use tableswitch. */
1755 int index = 0;
1756 RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1757 OP1 (OPCODE_tableswitch);
1758 emit_reloc (RELOCATION_VALUE_0,
1759 SWITCH_ALIGN_RELOC, NULL, state);
1760 emit_switch_reloc (sw_state.default_label, state);
1761 OP4 (sw_state.min_case);
1762 OP4 (sw_state.max_case);
1763 for (i = sw_state.min_case; ; )
1764 {
1765 reloc = relocs[index];
1766 if (i == reloc->offset)
1767 {
1768 emit_case_reloc (reloc, state);
1769 if (i == sw_state.max_case)
1770 break;
1771 index++;
1772 }
1773 else
1774 emit_switch_reloc (sw_state.default_label, state);
1775 i++;
1776 }
1777 }
1778 else
1779 { /* Use lookupswitch. */
1780 RESERVE(9 + 8 * sw_state.num_cases);
1781 OP1 (OPCODE_lookupswitch);
1782 emit_reloc (RELOCATION_VALUE_0,
1783 SWITCH_ALIGN_RELOC, NULL, state);
1784 emit_switch_reloc (sw_state.default_label, state);
1785 OP4 (sw_state.num_cases);
1786 for (i = 0; i < sw_state.num_cases; i++)
1787 {
1788 struct jcf_relocation *reloc = relocs[i];
1789 OP4 (reloc->offset);
1790 emit_case_reloc (reloc, state);
1791 }
1792 }
1793 free (relocs);
1794 }
1795
1796 instruction_last = state->last_block;
1797 if (sw_state.default_label->pc < 0)
1798 define_jcf_label (sw_state.default_label, state);
1799 else /* Force a new block. */
1800 sw_state.default_label = get_jcf_label_here (state);
1801 /* Now re-arrange the blocks so the switch_instruction
1802 comes before the switch_body. */
1803 switch_length = state->code_length - switch_instruction->pc;
1804 switch_instruction->pc = body_block->pc;
1805 instruction_last->next = body_block;
1806 instruction_last->v.chunk->next = body_block->v.chunk;
1807 expression_last->next = switch_instruction;
1808 expression_last->v.chunk->next = switch_instruction->v.chunk;
1809 body_last->next = sw_state.default_label;
1810 body_last->v.chunk->next = NULL;
1811 state->chunk = body_last->v.chunk;
1812 for (; body_block != sw_state.default_label; body_block = body_block->next)
1813 body_block->pc += switch_length;
1814
1815 state->sw_state = sw_state.prev;
1816 break;
1817 }
1818
1819 case RETURN_EXPR:
1820 exp = TREE_OPERAND (exp, 0);
1821 if (exp == NULL_TREE)
1822 exp = empty_stmt_node;
1823 else if (TREE_CODE (exp) != MODIFY_EXPR)
1824 abort ();
1825 else
1826 exp = TREE_OPERAND (exp, 1);
1827 generate_bytecode_return (exp, state);
1828 break;
1829 case LABELED_BLOCK_EXPR:
1830 {
1831 struct jcf_block *end_label = gen_jcf_label (state);
1832 end_label->next = state->labeled_blocks;
1833 state->labeled_blocks = end_label;
1834 end_label->pc = PENDING_EXIT_PC;
1835 end_label->u.labeled_block = exp;
1836 if (LABELED_BLOCK_BODY (exp))
1837 generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1838 if (state->labeled_blocks != end_label)
1839 abort();
1840 state->labeled_blocks = end_label->next;
1841 define_jcf_label (end_label, state);
1842 }
1843 break;
1844 case LOOP_EXPR:
1845 {
1846 tree body = TREE_OPERAND (exp, 0);
1847 #if 0
1848 if (TREE_CODE (body) == COMPOUND_EXPR
1849 && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1850 {
1851 /* Optimize: H: if (TEST) GOTO L; BODY; GOTO H; L:
1852 to: GOTO L; BODY; L: if (!TEST) GOTO L; */
1853 struct jcf_block *head_label;
1854 struct jcf_block *body_label;
1855 struct jcf_block *end_label = gen_jcf_label (state);
1856 struct jcf_block *exit_label = state->labeled_blocks;
1857 head_label = gen_jcf_label (state);
1858 emit_goto (head_label, state);
1859 body_label = get_jcf_label_here (state);
1860 generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1861 define_jcf_label (head_label, state);
1862 generate_bytecode_conditional (TREE_OPERAND (body, 0),
1863 end_label, body_label, 1, state);
1864 define_jcf_label (end_label, state);
1865 }
1866 else
1867 #endif
1868 {
1869 struct jcf_block *head_label = get_jcf_label_here (state);
1870 generate_bytecode_insns (body, IGNORE_TARGET, state);
1871 emit_goto (head_label, state);
1872 }
1873 }
1874 break;
1875 case EXIT_EXPR:
1876 {
1877 struct jcf_block *label = state->labeled_blocks;
1878 struct jcf_block *end_label = gen_jcf_label (state);
1879 generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1880 label, end_label, 0, state);
1881 define_jcf_label (end_label, state);
1882 }
1883 break;
1884 case EXIT_BLOCK_EXPR:
1885 {
1886 struct jcf_block *label = state->labeled_blocks;
1887 if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1888 while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1889 label = label->next;
1890 call_cleanups (label, state);
1891 emit_goto (label, state);
1892 }
1893 break;
1894
1895 case PREDECREMENT_EXPR: value = -1; post_op = 0; goto increment;
1896 case PREINCREMENT_EXPR: value = 1; post_op = 0; goto increment;
1897 case POSTDECREMENT_EXPR: value = -1; post_op = 1; goto increment;
1898 case POSTINCREMENT_EXPR: value = 1; post_op = 1; goto increment;
1899 increment:
1900
1901 exp = TREE_OPERAND (exp, 0);
1902 type = TREE_TYPE (exp);
1903 size = TYPE_IS_WIDE (type) ? 2 : 1;
1904 if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1905 && ! TREE_STATIC (exp)
1906 && TREE_CODE (type) == INTEGER_TYPE
1907 && TYPE_PRECISION (type) == 32)
1908 {
1909 if (target != IGNORE_TARGET && post_op)
1910 emit_load (exp, state);
1911 emit_iinc (exp, value, state);
1912 if (target != IGNORE_TARGET && ! post_op)
1913 emit_load (exp, state);
1914 break;
1915 }
1916 if (TREE_CODE (exp) == COMPONENT_REF)
1917 {
1918 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1919 emit_dup (1, 0, state);
1920 /* Stack: ..., objectref, objectref. */
1921 field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1922 NOTE_PUSH (size-1);
1923 /* Stack: ..., objectref, oldvalue. */
1924 offset = 1;
1925 }
1926 else if (TREE_CODE (exp) == ARRAY_REF)
1927 {
1928 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1929 generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1930 emit_dup (2, 0, state);
1931 /* Stack: ..., array, index, array, index. */
1932 jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1933 RESERVE(1);
1934 OP1 (jopcode);
1935 NOTE_POP (2-size);
1936 /* Stack: ..., array, index, oldvalue. */
1937 offset = 2;
1938 }
1939 else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1940 {
1941 generate_bytecode_insns (exp, STACK_TARGET, state);
1942 /* Stack: ..., oldvalue. */
1943 offset = 0;
1944 }
1945 else
1946 abort ();
1947
1948 if (target != IGNORE_TARGET && post_op)
1949 emit_dup (size, offset, state);
1950 /* Stack, if ARRAY_REF: ..., [result, ] array, index, oldvalue. */
1951 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, oldvalue. */
1952 /* Stack, otherwise: ..., [result, ] oldvalue. */
1953 if (size == 1)
1954 push_int_const (value, state);
1955 else
1956 push_long_const (value, (HOST_WIDE_INT)(value >= 0 ? 0 : -1), state);
1957 NOTE_PUSH (size);
1958 emit_binop (OPCODE_iadd + adjust_typed_op (type, 3), type, state);
1959 if (target != IGNORE_TARGET && ! post_op)
1960 emit_dup (size, offset, state);
1961 /* Stack, if ARRAY_REF: ..., [result, ] array, index, newvalue. */
1962 /* Stack, if COMPONENT_REF: ..., [result, ] objectref, newvalue. */
1963 /* Stack, otherwise: ..., [result, ] newvalue. */
1964 goto finish_assignment;
1965
1966 case MODIFY_EXPR:
1967 {
1968 tree lhs = TREE_OPERAND (exp, 0);
1969 tree rhs = TREE_OPERAND (exp, 1);
1970 int offset = 0;
1971
1972 /* See if we can use the iinc instruction. */
1973 if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
1974 && ! TREE_STATIC (lhs)
1975 && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
1976 && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
1977 && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
1978 {
1979 tree arg0 = TREE_OPERAND (rhs, 0);
1980 tree arg1 = TREE_OPERAND (rhs, 1);
1981 HOST_WIDE_INT min_value = -32768;
1982 HOST_WIDE_INT max_value = 32767;
1983 if (TREE_CODE (rhs) == MINUS_EXPR)
1984 {
1985 min_value++;
1986 max_value++;
1987 }
1988 else if (arg1 == lhs)
1989 {
1990 arg0 = arg1;
1991 arg1 = TREE_OPERAND (rhs, 0);
1992 }
1993 if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
1994 {
1995 HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
1996 value = TREE_INT_CST_LOW (arg1);
1997 if ((hi_value == 0 && value <= max_value)
1998 || (hi_value == -1 && value >= min_value))
1999 {
2000 if (TREE_CODE (rhs) == MINUS_EXPR)
2001 value = -value;
2002 emit_iinc (lhs, value, state);
2003 break;
2004 }
2005 }
2006 }
2007
2008 if (TREE_CODE (lhs) == COMPONENT_REF)
2009 {
2010 generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2011 STACK_TARGET, state);
2012 offset = 1;
2013 }
2014 else if (TREE_CODE (lhs) == ARRAY_REF)
2015 {
2016 generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2017 STACK_TARGET, state);
2018 generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2019 STACK_TARGET, state);
2020 offset = 2;
2021 }
2022 else
2023 offset = 0;
2024 generate_bytecode_insns (rhs, STACK_TARGET, state);
2025 if (target != IGNORE_TARGET)
2026 emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2027 exp = lhs;
2028 }
2029       /* FALLTHROUGH */
2030
2031 finish_assignment:
2032 if (TREE_CODE (exp) == COMPONENT_REF)
2033 {
2034 tree field = TREE_OPERAND (exp, 1);
2035 if (! FIELD_STATIC (field))
2036 NOTE_POP (1);
2037 field_op (field,
2038 FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2039 state);
2040
2041 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2042 }
2043 else if (TREE_CODE (exp) == VAR_DECL
2044 || TREE_CODE (exp) == PARM_DECL)
2045 {
2046 if (FIELD_STATIC (exp))
2047 {
2048 field_op (exp, OPCODE_putstatic, state);
2049 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2050 }
2051 else
2052 emit_store (exp, state);
2053 }
2054 else if (TREE_CODE (exp) == ARRAY_REF)
2055 {
2056 jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2057 RESERVE(1);
2058 OP1 (jopcode);
2059 NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2060 }
2061 else
2062 fatal ("internal error (bad lhs to MODIFY_EXPR)");
2063 break;
2064 case PLUS_EXPR:
2065 jopcode = OPCODE_iadd;
2066 goto binop;
2067 case MINUS_EXPR:
2068 jopcode = OPCODE_isub;
2069 goto binop;
2070 case MULT_EXPR:
2071 jopcode = OPCODE_imul;
2072 goto binop;
2073 case TRUNC_DIV_EXPR:
2074 case RDIV_EXPR:
2075 jopcode = OPCODE_idiv;
2076 goto binop;
2077 case TRUNC_MOD_EXPR:
2078 jopcode = OPCODE_irem;
2079 goto binop;
2080 case LSHIFT_EXPR: jopcode = OPCODE_ishl; goto binop;
2081 case RSHIFT_EXPR: jopcode = OPCODE_ishr; goto binop;
2082 case URSHIFT_EXPR: jopcode = OPCODE_iushr; goto binop;
2083 case TRUTH_AND_EXPR:
2084 case BIT_AND_EXPR: jopcode = OPCODE_iand; goto binop;
2085 case TRUTH_OR_EXPR:
2086 case BIT_IOR_EXPR: jopcode = OPCODE_ior; goto binop;
2087 case TRUTH_XOR_EXPR:
2088 case BIT_XOR_EXPR: jopcode = OPCODE_ixor; goto binop;
2089 binop:
2090 {
2091 tree arg0 = TREE_OPERAND (exp, 0);
2092 tree arg1 = TREE_OPERAND (exp, 1);
2093 jopcode += adjust_typed_op (type, 3);
2094 if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2095 {
2096 	  /* fold may (e.g.) convert 2*x to x+x. */
2097 generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2098 emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2099 }
2100 else
2101 {
2102 generate_bytecode_insns (arg0, target, state);
2103 if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2104 arg1 = convert (int_type_node, arg1);
2105 generate_bytecode_insns (arg1, target, state);
2106 }
2107 /* For most binary operations, both operands and the result have the
2108 same type. Shift operations are different. Using arg1's type
2109 gets us the correct SP adjustment in all cases. */
2110 if (target == STACK_TARGET)
2111 emit_binop (jopcode, TREE_TYPE (arg1), state);
2112 break;
2113 }
2114 case TRUTH_NOT_EXPR:
2115 case BIT_NOT_EXPR:
2116 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2117 if (target == STACK_TARGET)
2118 {
2119 int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2120 push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2121 RESERVE (2);
2122 if (is_long)
2123 OP1 (OPCODE_i2l);
2124 NOTE_PUSH (1 + is_long);
2125 OP1 (OPCODE_ixor + is_long);
2126 NOTE_POP (1 + is_long);
2127 }
2128 break;
2129 case NEGATE_EXPR:
2130 jopcode = OPCODE_ineg;
2131 jopcode += adjust_typed_op (type, 3);
2132 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2133 if (target == STACK_TARGET)
2134 emit_unop (jopcode, type, state);
2135 break;
2136 case INSTANCEOF_EXPR:
2137 {
2138 int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2139 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2140 RESERVE (3);
2141 OP1 (OPCODE_instanceof);
2142 OP2 (index);
2143 }
2144 break;
2145 case SAVE_EXPR:
2146 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2147 break;
2148 case CONVERT_EXPR:
2149 case NOP_EXPR:
2150 case FLOAT_EXPR:
2151 case FIX_TRUNC_EXPR:
2152 {
2153 tree src = TREE_OPERAND (exp, 0);
2154 tree src_type = TREE_TYPE (src);
2155 tree dst_type = TREE_TYPE (exp);
2156 generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2157 if (target == IGNORE_TARGET || src_type == dst_type)
2158 break;
2159 if (TREE_CODE (dst_type) == POINTER_TYPE)
2160 {
2161 if (TREE_CODE (exp) == CONVERT_EXPR)
2162 {
2163 int index = find_class_constant (&state->cpool,
2164 TREE_TYPE (dst_type));
2165 RESERVE (3);
2166 OP1 (OPCODE_checkcast);
2167 OP2 (index);
2168 }
2169 }
2170 else /* Convert numeric types. */
2171 {
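	    /* The JVM numbers the primitive conversion opcodes
	       consecutively (i2l, i2f, i2d, l2i, l2f, l2d, f2i, f2l, f2d,
	       d2i, d2l, d2f, i2b, i2c, i2s), so the correct one can be
	       selected by adding the source and destination widths to a
	       base opcode, as done below.  */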
2172 int wide_src = TYPE_PRECISION (src_type) > 32;
2173 int wide_dst = TYPE_PRECISION (dst_type) > 32;
2174 NOTE_POP (1 + wide_src);
2175 RESERVE (1);
2176 if (TREE_CODE (dst_type) == REAL_TYPE)
2177 {
2178 if (TREE_CODE (src_type) == REAL_TYPE)
2179 OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2180 else if (TYPE_PRECISION (src_type) == 64)
2181 OP1 (OPCODE_l2f + wide_dst);
2182 else
2183 OP1 (OPCODE_i2f + wide_dst);
2184 }
2185 else /* Convert to integral type. */
2186 {
2187 if (TREE_CODE (src_type) == REAL_TYPE)
2188 OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2189 else if (wide_dst)
2190 OP1 (OPCODE_i2l);
2191 else if (wide_src)
2192 OP1 (OPCODE_l2i);
2193 if (TYPE_PRECISION (dst_type) < 32)
2194 {
2195 RESERVE (1);
2196 /* Already converted to int, if needed. */
2197 if (TYPE_PRECISION (dst_type) <= 8)
2198 OP1 (OPCODE_i2b);
2199 else if (TREE_UNSIGNED (dst_type))
2200 OP1 (OPCODE_i2c);
2201 else
2202 OP1 (OPCODE_i2s);
2203 }
2204 }
2205 NOTE_PUSH (1 + wide_dst);
2206 }
2207 }
2208 break;
2209
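    /* WITH_CLEANUP_EXPR (handled below) registers a pending cleanup on
       state->labeled_blocks; CLEANUP_POINT_EXPR pops every cleanup
       registered while generating its operand and emits each one as a
       jsr/ret subroutine guarded by a catch-all handler, much like the
       TRY_FINALLY_EXPR case.  */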
2210 case CLEANUP_POINT_EXPR:
2211 {
2212 struct jcf_block *save_labeled_blocks = state->labeled_blocks;
2213 int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
2214 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2215 if (target != IGNORE_TARGET)
2216 abort ();
2217 while (state->labeled_blocks != save_labeled_blocks)
2218 {
2219 struct jcf_block *finished_label = NULL;
2220 tree return_link;
2221 tree exception_type = build_pointer_type (throwable_type_node);
2222 tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
2223 exception_type);
2224 struct jcf_block *end_label = get_jcf_label_here (state);
2225 struct jcf_block *label = state->labeled_blocks;
2226 struct jcf_handler *handler;
2227 tree cleanup = label->u.labeled_block;
2228 state->labeled_blocks = label->next;
2229 state->num_finalizers--;
2230 if (can_complete)
2231 {
2232 finished_label = gen_jcf_label (state);
2233 emit_jsr (label, state);
2234 emit_goto (finished_label, state);
2235 if (! CAN_COMPLETE_NORMALLY (cleanup))
2236 can_complete = 0;
2237 }
2238 handler = alloc_handler (label->v.start_label, end_label, state);
2239 handler->type = NULL_TREE;
2240 localvar_alloc (exception_decl, state);
2241 NOTE_PUSH (1);
2242 emit_store (exception_decl, state);
2243 emit_jsr (label, state);
2244 emit_load (exception_decl, state);
2245 RESERVE (1);
2246 OP1 (OPCODE_athrow);
2247 NOTE_POP (1);
2248
2249 /* The finally block. */
2250 return_link = build_decl (VAR_DECL, NULL_TREE,
2251 return_address_type_node);
2252 define_jcf_label (label, state);
2253 NOTE_PUSH (1);
2254 localvar_alloc (return_link, state);
2255 emit_store (return_link, state);
2256 generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
2257 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2258 localvar_free (return_link, state);
2259 localvar_free (exception_decl, state);
2260 if (finished_label != NULL)
2261 define_jcf_label (finished_label, state);
2262 }
2263 }
2264 break;
2265
2266 case WITH_CLEANUP_EXPR:
2267 {
2268 struct jcf_block *label;
2269 generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
2270 label = gen_jcf_label (state);
2271 label->pc = PENDING_CLEANUP_PC;
2272 label->next = state->labeled_blocks;
2273 state->labeled_blocks = label;
2274 state->num_finalizers++;
2275 label->u.labeled_block = TREE_OPERAND (exp, 2);
2276 label->v.start_label = get_jcf_label_here (state);
2277 if (target != IGNORE_TARGET)
2278 abort ();
2279 }
2280 break;
2281
2282 case TRY_EXPR:
2283 {
2284 tree try_clause = TREE_OPERAND (exp, 0);
2285 struct jcf_block *start_label = get_jcf_label_here (state);
2286 struct jcf_block *end_label; /* End of try clause. */
2287 struct jcf_block *finished_label = gen_jcf_label (state);
2288 tree clause = TREE_OPERAND (exp, 1);
2289 if (target != IGNORE_TARGET)
2290 abort ();
2291 generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2292 end_label = get_jcf_label_here (state);
2293 if (CAN_COMPLETE_NORMALLY (try_clause))
2294 emit_goto (finished_label, state);
2295 while (clause != NULL_TREE)
2296 {
2297 tree catch_clause = TREE_OPERAND (clause, 0);
2298 tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2299 struct jcf_handler *handler = alloc_handler (start_label, end_label, state);
2300 if (exception_decl == NULL_TREE)
2301 handler->type = NULL_TREE;
2302 else
2303 handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2304 generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2305 clause = TREE_CHAIN (clause);
2306 if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2307 emit_goto (finished_label, state);
2308 }
2309 define_jcf_label (finished_label, state);
2310 }
2311 break;
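    /* A try/finally is lowered to the classic jsr/ret pattern:

	   <try body>; jsr FINALLY; goto DONE
	 HANDLER:   (catch-all for the protected range)
	   astore EXC; jsr FINALLY; aload EXC; athrow
	 FINALLY:
	   astore RETURN_LINK; <finally body>; ret RETURN_LINK
	 DONE:

       The upper-case labels are just descriptive names for the
       jcf_blocks created below.  */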
2312 case TRY_FINALLY_EXPR:
2313 {
2314 struct jcf_block *finished_label, *finally_label, *start_label;
2315 struct jcf_handler *handler;
2316 int worthwhile_finally = 1;
2317 tree try_block = TREE_OPERAND (exp, 0);
2318 tree finally = TREE_OPERAND (exp, 1);
2319 tree return_link, exception_decl;
2320
2321 finally_label = start_label = NULL;
2322 return_link = exception_decl = NULL_TREE;
2323 finished_label = gen_jcf_label (state);
2324
2325 /* If the finally clause happens to be empty, set a flag so we
2326 remember to just skip it. */
2327 if (BLOCK_EXPR_BODY (finally) == empty_stmt_node)
2328 worthwhile_finally = 0;
2329
2330 if (worthwhile_finally)
2331 {
2332 tree exception_type;
2333 return_link = build_decl (VAR_DECL, NULL_TREE,
2334 return_address_type_node);
2335 exception_type = build_pointer_type (throwable_type_node);
2336 exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2337
2338 finally_label = gen_jcf_label (state);
2339 start_label = get_jcf_label_here (state);
2340 finally_label->pc = PENDING_CLEANUP_PC;
2341 finally_label->next = state->labeled_blocks;
2342 state->labeled_blocks = finally_label;
2343 state->num_finalizers++;
2344 }
2345
2346 generate_bytecode_insns (try_block, target, state);
2347
2348 if (worthwhile_finally)
2349 {
2350 if (state->labeled_blocks != finally_label)
2351 abort();
2352 state->labeled_blocks = finally_label->next;
2353 emit_jsr (finally_label, state);
2354 }
2355
2356 if (CAN_COMPLETE_NORMALLY (try_block)
2357 && BLOCK_EXPR_BODY (try_block) != empty_stmt_node)
2358 emit_goto (finished_label, state);
2359
2360 /* Handle exceptions. */
2361
2362 if (!worthwhile_finally)
2363 break;
2364
2365 localvar_alloc (return_link, state);
2366 handler = alloc_handler (start_label, NULL_PTR, state);
2367 handler->end_label = handler->handler_label;
2368 handler->type = NULL_TREE;
2369 localvar_alloc (exception_decl, state);
2370 NOTE_PUSH (1);
2371 emit_store (exception_decl, state);
2372 emit_jsr (finally_label, state);
2373 emit_load (exception_decl, state);
2374 RESERVE (1);
2375 OP1 (OPCODE_athrow);
2376 NOTE_POP (1);
2377 localvar_free (exception_decl, state);
2378
2379 /* The finally block. First save return PC into return_link. */
2380 define_jcf_label (finally_label, state);
2381 NOTE_PUSH (1);
2382 emit_store (return_link, state);
2383
2384 generate_bytecode_insns (finally, IGNORE_TARGET, state);
2385 maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2386 localvar_free (return_link, state);
2387 define_jcf_label (finished_label, state);
2388 }
2389 break;
2390 case THROW_EXPR:
2391 generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2392 RESERVE (1);
2393 OP1 (OPCODE_athrow);
2394 break;
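    /* An array initializer is emitted as: push the length, allocate the
       array with newarray (primitive element type) or anewarray
       (reference element type), then for each element dup the array
       reference, push the index and the value, and store it with the
       matching <t>astore opcode.  */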
2395 case NEW_ARRAY_INIT:
2396 {
2397 tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2398 tree array_type = TREE_TYPE (TREE_TYPE (exp));
2399 tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2400 HOST_WIDE_INT length = java_array_type_length (array_type);
2401 if (target == IGNORE_TARGET)
2402 {
2403 for ( ; values != NULL_TREE; values = TREE_CHAIN (values))
2404 generate_bytecode_insns (TREE_VALUE (values), target, state);
2405 break;
2406 }
2407 push_int_const (length, state);
2408 NOTE_PUSH (1);
2409 RESERVE (3);
2410 if (JPRIMITIVE_TYPE_P (element_type))
2411 {
2412 int atype = encode_newarray_type (element_type);
2413 OP1 (OPCODE_newarray);
2414 OP1 (atype);
2415 }
2416 else
2417 {
2418 int index = find_class_constant (&state->cpool,
2419 TREE_TYPE (element_type));
2420 OP1 (OPCODE_anewarray);
2421 OP2 (index);
2422 }
2423 offset = 0;
2424 jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2425 for ( ; values != NULL_TREE; values = TREE_CHAIN (values), offset++)
2426 {
2427 int save_SP = state->code_SP;
2428 emit_dup (1, 0, state);
2429 push_int_const (offset, state);
2430 NOTE_PUSH (1);
2431 generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2432 RESERVE (1);
2433 OP1 (jopcode);
2434 state->code_SP = save_SP;
2435 }
2436 }
2437 break;
2438 case NEW_CLASS_EXPR:
2439 {
2440 tree class = TREE_TYPE (TREE_TYPE (exp));
2441 int need_result = target != IGNORE_TARGET;
2442 int index = find_class_constant (&state->cpool, class);
2443 RESERVE (4);
2444 OP1 (OPCODE_new);
2445 OP2 (index);
2446 if (need_result)
2447 OP1 (OPCODE_dup);
2448 NOTE_PUSH (1 + need_result);
2449 }
2450       /* ... fall through ... */
2451 case CALL_EXPR:
2452 {
2453 tree f = TREE_OPERAND (exp, 0);
2454 tree x = TREE_OPERAND (exp, 1);
2455 int save_SP = state->code_SP;
2456 int nargs;
2457 if (TREE_CODE (f) == ADDR_EXPR)
2458 f = TREE_OPERAND (f, 0);
2459 if (f == soft_newarray_node)
2460 {
2461 int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2462 generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2463 STACK_TARGET, state);
2464 RESERVE (2);
2465 OP1 (OPCODE_newarray);
2466 OP1 (type_code);
2467 break;
2468 }
2469 else if (f == soft_multianewarray_node)
2470 {
2471 int ndims;
2472 int idim;
2473 int index = find_class_constant (&state->cpool,
2474 TREE_TYPE (TREE_TYPE (exp)));
2475 x = TREE_CHAIN (x); /* Skip class argument. */
2476 ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2477 for (idim = ndims; --idim >= 0; )
2478 {
2479 x = TREE_CHAIN (x);
2480 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2481 }
2482 RESERVE (4);
2483 OP1 (OPCODE_multianewarray);
2484 OP2 (index);
2485 OP1 (ndims);
2486 break;
2487 }
2488 else if (f == soft_anewarray_node)
2489 {
2490 tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2491 int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2492 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2493 RESERVE (3);
2494 OP1 (OPCODE_anewarray);
2495 OP2 (index);
2496 break;
2497 }
2498 else if (f == soft_monitorenter_node
2499 || f == soft_monitorexit_node
2500 || f == throw_node[0]
2501 || f == throw_node[1])
2502 {
2503 if (f == soft_monitorenter_node)
2504 op = OPCODE_monitorenter;
2505 else if (f == soft_monitorexit_node)
2506 op = OPCODE_monitorexit;
2507 else
2508 op = OPCODE_athrow;
2509 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2510 RESERVE (1);
2511 OP1 (op);
2512 NOTE_POP (1);
2513 break;
2514 }
2515 else if (exp == soft_exceptioninfo_call_node)
2516 {
2517 NOTE_PUSH (1); /* Pushed by exception system. */
2518 break;
2519 }
2520 for ( ; x != NULL_TREE; x = TREE_CHAIN (x))
2521 {
2522 generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2523 }
2524 nargs = state->code_SP - save_SP;
2525 state->code_SP = save_SP;
2526 if (f == soft_fmod_node)
2527 {
2528 RESERVE (1);
2529 OP1 (OPCODE_drem);
2530 NOTE_PUSH (2);
2531 break;
2532 }
2533 if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2534 NOTE_POP (1); /* Pop implicit this. */
2535 if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2536 {
2537 tree saved_context;
2538 int index, interface = 0;
2539 RESERVE (5);
2540 if (METHOD_STATIC (f))
2541 OP1 (OPCODE_invokestatic);
2542 else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2543 || METHOD_PRIVATE (f))
2544 OP1 (OPCODE_invokespecial);
2545 else if (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (f))))
2546 {
2547 OP1 (OPCODE_invokeinterface);
2548 interface = 1;
2549 }
2550 else
2551 OP1 (OPCODE_invokevirtual);
2552 if (interface)
2553 {
2554 saved_context = DECL_CONTEXT (f);
2555 DECL_CONTEXT (f) =
2556 TREE_TYPE (TREE_TYPE (TREE_VALUE (TREE_OPERAND (exp, 1))));
2557 }
2558 index = find_methodref_index (&state->cpool, f);
2559 if (interface)
2560 DECL_CONTEXT (f) = saved_context;
2561 OP2 (index);
2562 f = TREE_TYPE (TREE_TYPE (f));
2563 if (TREE_CODE (f) != VOID_TYPE)
2564 {
2565 int size = TYPE_IS_WIDE (f) ? 2 : 1;
2566 if (target == IGNORE_TARGET)
2567 emit_pop (size, state);
2568 else
2569 NOTE_PUSH (size);
2570 }
2571 if (interface)
2572 {
2573 OP1 (nargs);
2574 OP1 (0);
2575 }
2576 break;
2577 }
2578 }
2579 /* fall through */
2580 notimpl:
2581 default:
2582 error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2583 tree_code_name [(int) TREE_CODE (exp)]);
2584 }
2585 }
2586
2587 static void
2588 perform_relocations (state)
2589 struct jcf_partial *state;
2590 {
2591 struct jcf_block *block;
2592 struct jcf_relocation *reloc;
2593 int pc;
2594 int shrink;
2595
2596 /* Before we start, the pc field of each block is an upper bound on
2597 the block's start pc (it may be less, if previous blocks need less
2598 than their maximum).
2599
2600 The minimum size of each block is in the block's chunk->size. */
2601
2602 /* First, figure out the actual locations of each block. */
2603 pc = 0;
2604 shrink = 0;
2605 for (block = state->blocks; block != NULL; block = block->next)
2606 {
2607 int block_size = block->v.chunk->size;
2608
2609 block->pc = pc;
2610
2611 /* Optimize GOTO L; L: by getting rid of the redundant goto.
2612 Assumes relocations are in reverse order. */
2613 reloc = block->u.relocations;
2614 while (reloc != NULL
2615 && reloc->kind == OPCODE_goto_w
2616 && reloc->label->pc == block->next->pc
2617 && reloc->offset + 2 == block_size)
2618 {
2619 reloc = reloc->next;
2620 block->u.relocations = reloc;
2621 block->v.chunk->size -= 3;
2622 block_size -= 3;
2623 shrink += 3;
2624 }
2625
2626 for (reloc = block->u.relocations; reloc != NULL; reloc = reloc->next)
2627 {
2628 if (reloc->kind == SWITCH_ALIGN_RELOC)
2629 {
2630 /* We assume this is the first relocation in this block,
2631 so we know its final pc. */
2632 int where = pc + reloc->offset;
2633 int pad = ((where + 3) & ~3) - where;
2634 block_size += pad;
2635 }
2636 else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2637 {
2638 int delta = reloc->label->pc - (pc + reloc->offset - 1);
2639 int expand = reloc->kind > 0 ? 2 : 5;
2640
2641 if (delta > 0)
2642 delta -= shrink;
2643 if (delta >= -32768 && delta <= 32767)
2644 {
2645 shrink += expand;
2646 reloc->kind = -1;
2647 }
2648 else
2649 block_size += expand;
2650 }
2651 }
2652 pc += block_size;
2653 }
2654
2655 for (block = state->blocks; block != NULL; block = block->next)
2656 {
2657 struct chunk *chunk = block->v.chunk;
2658 int old_size = chunk->size;
2659 int next_pc = block->next == NULL ? pc : block->next->pc;
2660 int new_size = next_pc - block->pc;
2661 unsigned char *new_ptr;
2662 unsigned char *old_buffer = chunk->data;
2663 unsigned char *old_ptr = old_buffer + old_size;
2664 if (new_size != old_size)
2665 {
2666 chunk->data = (unsigned char *)
2667 obstack_alloc (state->chunk_obstack, new_size);
2668 chunk->size = new_size;
2669 }
2670 new_ptr = chunk->data + new_size;
2671
2672 /* We do the relocations from back to front, because
2673 the relocations are in reverse order. */
2674 for (reloc = block->u.relocations; ; reloc = reloc->next)
2675 {
2676 /* new_ptr and old_ptr point into the old and new buffers,
2677 respectively. (If no relocations cause the buffer to
2678 grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2679 	     The bytes at higher address have been copied and relocations
2680 handled; those at lower addresses remain to process. */
2681
2682 /* Lower old index of piece to be copied with no relocation.
2683 I.e. high index of the first piece that does need relocation. */
2684 int start = reloc == NULL ? 0
2685 : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2686 : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2687 ? reloc->offset + 4
2688 : reloc->offset + 2;
2689 int32 value;
2690 int new_offset;
2691 int n = (old_ptr - old_buffer) - start;
2692 new_ptr -= n;
2693 old_ptr -= n;
2694 if (n > 0)
2695 memcpy (new_ptr, old_ptr, n);
2696 if (old_ptr == old_buffer)
2697 break;
2698
2699 new_offset = new_ptr - chunk->data;
2700 new_offset -= (reloc->kind == -1 ? 2 : 4);
2701 if (reloc->kind == 0)
2702 {
2703 old_ptr -= 4;
2704 value = GET_u4 (old_ptr);
2705 }
2706 else if (reloc->kind == BLOCK_START_RELOC)
2707 {
2708 old_ptr -= 4;
2709 value = 0;
2710 new_offset = 0;
2711 }
2712 else if (reloc->kind == SWITCH_ALIGN_RELOC)
2713 {
2714 int where = block->pc + reloc->offset;
2715 int pad = ((where + 3) & ~3) - where;
2716 while (--pad >= 0)
2717 *--new_ptr = 0;
2718 continue;
2719 }
2720 else
2721 {
2722 old_ptr -= 2;
2723 value = GET_u2 (old_ptr);
2724 }
2725 value += reloc->label->pc - (block->pc + new_offset);
2726 *--new_ptr = (unsigned char) value; value >>= 8;
2727 *--new_ptr = (unsigned char) value; value >>= 8;
2728 if (reloc->kind != -1)
2729 {
2730 *--new_ptr = (unsigned char) value; value >>= 8;
2731 *--new_ptr = (unsigned char) value;
2732 }
2733 if (reloc->kind > BLOCK_START_RELOC)
2734 {
2735 /* Convert: OP TARGET to: OP_w TARGET; (OP is goto or jsr). */
2736 --old_ptr;
2737 *--new_ptr = reloc->kind;
2738 }
2739 else if (reloc->kind < -1)
2740 {
2741 /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2742 --old_ptr;
2743 *--new_ptr = OPCODE_goto_w;
2744 *--new_ptr = 3;
2745 *--new_ptr = 0;
2746 *--new_ptr = - reloc->kind;
2747 }
2748 }
2749 if (new_ptr != chunk->data)
2750 fatal ("internal error - perform_relocations");
2751 }
2752 state->code_length = pc;
2753 }
2754
2755 static void
2756 init_jcf_state (state, work)
2757 struct jcf_partial *state;
2758 struct obstack *work;
2759 {
2760 state->chunk_obstack = work;
2761 state->first = state->chunk = NULL;
2762 CPOOL_INIT (&state->cpool);
2763 BUFFER_INIT (&state->localvars);
2764 BUFFER_INIT (&state->bytecode);
2765 }
2766
2767 static void
2768 init_jcf_method (state, method)
2769 struct jcf_partial *state;
2770 tree method;
2771 {
2772 state->current_method = method;
2773 state->blocks = state->last_block = NULL;
2774 state->linenumber_count = 0;
2775 state->first_lvar = state->last_lvar = NULL;
2776 state->lvar_count = 0;
2777 state->labeled_blocks = NULL;
2778 state->code_length = 0;
2779 BUFFER_RESET (&state->bytecode);
2780 BUFFER_RESET (&state->localvars);
2781 state->code_SP = 0;
2782 state->code_SP_max = 0;
2783 state->handlers = NULL;
2784 state->last_handler = NULL;
2785 state->num_handlers = 0;
2786 state->num_finalizers = 0;
2787 state->return_value_decl = NULL_TREE;
2788 }
2789
2790 static void
2791 release_jcf_state (state)
2792 struct jcf_partial *state;
2793 {
2794 CPOOL_FINISH (&state->cpool);
2795 obstack_free (state->chunk_obstack, state->first);
2796 }
2797
2798 /* Generate and return a list of chunks containing the class CLAS
2799 in the .class file representation. The list can be written to a
2800    .class file using write_chunks.  Allocate chunks from the obstack in STATE. */
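/* The chunks are appended in standard ClassFile order: magic and version
   numbers, the constant pool (whose contents are generated last, once all
   constants are known), access_flags, this_class, super_class, the
   interface list, the field_info entries, the method_info entries (each
   with its Code, Exceptions and Synthetic attributes, as applicable), and
   finally the class-level attributes (SourceFile and InnerClasses).  */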
2801
2802 static struct chunk *
2803 generate_classfile (clas, state)
2804 tree clas;
2805 struct jcf_partial *state;
2806 {
2807 struct chunk *cpool_chunk;
2808 const char *source_file, *s;
2809 char *ptr;
2810 int i;
2811 char *fields_count_ptr;
2812 int fields_count = 0;
2813 char *methods_count_ptr;
2814 int methods_count = 0;
2815 static tree SourceFile_node = NULL_TREE;
2816 tree part;
2817 int total_supers
2818 = clas == object_type_node ? 0
2819 : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2820
2821 ptr = append_chunk (NULL, 8, state);
2822 PUT4 (0xCafeBabe); /* Magic number */
2823 PUT2 (3); /* Minor version */
2824 PUT2 (45); /* Major version */
2825
2826 append_chunk (NULL, 0, state);
2827 cpool_chunk = state->chunk;
2828
2829   /* Next allocate the chunk containing access_flags through fields_count. */
2830 if (clas == object_type_node)
2831 i = 10;
2832 else
2833 i = 8 + 2 * total_supers;
2834 ptr = append_chunk (NULL, i, state);
2835 i = get_access_flags (TYPE_NAME (clas));
2836 if (! (i & ACC_INTERFACE))
2837 i |= ACC_SUPER;
2838   PUT2 (i); /* access_flags */
2839 i = find_class_constant (&state->cpool, clas); PUT2 (i); /* this_class */
2840 if (clas == object_type_node)
2841 {
2842 PUT2(0); /* super_class */
2843 PUT2(0); /* interfaces_count */
2844 }
2845 else
2846 {
2847 tree basetypes = TYPE_BINFO_BASETYPES (clas);
2848 tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2849 int j = find_class_constant (&state->cpool, base);
2850 PUT2 (j); /* super_class */
2851 PUT2 (total_supers - 1); /* interfaces_count */
2852 for (i = 1; i < total_supers; i++)
2853 {
2854 base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2855 j = find_class_constant (&state->cpool, base);
2856 PUT2 (j);
2857 }
2858 }
2859 fields_count_ptr = ptr;
2860
2861 for (part = TYPE_FIELDS (clas); part; part = TREE_CHAIN (part))
2862 {
2863 int have_value, attr_count = 0;
2864 if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2865 continue;
2866 ptr = append_chunk (NULL, 8, state);
2867 i = get_access_flags (part); PUT2 (i);
2868 i = find_utf8_constant (&state->cpool, DECL_NAME (part)); PUT2 (i);
2869 i = find_utf8_constant (&state->cpool,
2870 build_java_signature (TREE_TYPE (part)));
2871 PUT2(i);
2872 have_value = DECL_INITIAL (part) != NULL_TREE && FIELD_STATIC (part)
2873 && TREE_CODE (TREE_TYPE (part)) != POINTER_TYPE;
2874 if (have_value)
2875 attr_count++;
2876
2877 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2878 attr_count++;
2879
2880 PUT2 (attr_count); /* attributes_count */
2881 if (have_value)
2882 {
2883 tree init = DECL_INITIAL (part);
2884 static tree ConstantValue_node = NULL_TREE;
2885 ptr = append_chunk (NULL, 8, state);
2886 if (ConstantValue_node == NULL_TREE)
2887 ConstantValue_node = get_identifier ("ConstantValue");
2888 i = find_utf8_constant (&state->cpool, ConstantValue_node);
2889 PUT2 (i); /* attribute_name_index */
2890 PUT4 (2); /* attribute_length */
2891 i = find_constant_index (init, state); PUT2 (i);
2892 }
2893 /* Emit the "Synthetic" attribute for val$<x> and this$<n> fields. */
2894 if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part))
2895 ptr = append_synthetic_attribute (state);
2896 fields_count++;
2897 }
2898 ptr = fields_count_ptr; UNSAFE_PUT2 (fields_count);
2899
2900 ptr = methods_count_ptr = append_chunk (NULL, 2, state);
2901 PUT2 (0);
2902
2903 for (part = TYPE_METHODS (clas); part; part = TREE_CHAIN (part))
2904 {
2905 struct jcf_block *block;
2906 tree function_body = DECL_FUNCTION_BODY (part);
2907 tree body = function_body == NULL_TREE ? NULL_TREE
2908 : BLOCK_EXPR_BODY (function_body);
2909 tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
2910 : DECL_NAME (part);
2911 tree type = TREE_TYPE (part);
2912 tree save_function = current_function_decl;
2913 int synthetic_p = 0;
2914 current_function_decl = part;
2915 ptr = append_chunk (NULL, 8, state);
2916 i = get_access_flags (part); PUT2 (i);
2917 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
2918 i = find_utf8_constant (&state->cpool, build_java_signature (type));
2919 PUT2 (i);
2920 i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
2921
2922       /* Make room for the Synthetic attribute (of zero length).  */
2923 if (DECL_FINIT_P (part)
2924 || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
2925 || TYPE_DOT_CLASS (clas) == part)
2926 {
2927 i++;
2928 synthetic_p = 1;
2929 }
2930
2931 PUT2 (i); /* attributes_count */
2932
2933 if (synthetic_p)
2934 ptr = append_synthetic_attribute (state);
2935
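      /* Emit the Code attribute: name index and length, max_stack,
	 max_locals, code_length and the relocated bytecode, the exception
	 table, and then any LineNumberTable and LocalVariableTable
	 sub-attributes.  */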
2936 if (body != NULL_TREE)
2937 {
2938 int code_attributes_count = 0;
2939 static tree Code_node = NULL_TREE;
2940 tree t;
2941 char *attr_len_ptr;
2942 struct jcf_handler *handler;
2943 if (Code_node == NULL_TREE)
2944 Code_node = get_identifier ("Code");
2945 ptr = append_chunk (NULL, 14, state);
2946 i = find_utf8_constant (&state->cpool, Code_node); PUT2 (i);
2947 attr_len_ptr = ptr;
2948 init_jcf_method (state, part);
2949 get_jcf_label_here (state); /* Force a first block. */
2950 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2951 localvar_alloc (t, state);
2952 generate_bytecode_insns (body, IGNORE_TARGET, state);
2953 if (CAN_COMPLETE_NORMALLY (body))
2954 {
2955 if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
2956 abort();
2957 RESERVE (1);
2958 OP1 (OPCODE_return);
2959 }
2960 for (t = DECL_ARGUMENTS (part); t != NULL_TREE; t = TREE_CHAIN (t))
2961 localvar_free (t, state);
2962 if (state->return_value_decl != NULL_TREE)
2963 localvar_free (state->return_value_decl, state);
2964 finish_jcf_block (state);
2965 perform_relocations (state);
2966
2967 ptr = attr_len_ptr;
2968 i = 8 + state->code_length + 4 + 8 * state->num_handlers;
2969 if (state->linenumber_count > 0)
2970 {
2971 code_attributes_count++;
2972 i += 8 + 4 * state->linenumber_count;
2973 }
2974 if (state->lvar_count > 0)
2975 {
2976 code_attributes_count++;
2977 i += 8 + 10 * state->lvar_count;
2978 }
2979 UNSAFE_PUT4 (i); /* attribute_length */
2980 UNSAFE_PUT2 (state->code_SP_max); /* max_stack */
2981 UNSAFE_PUT2 (localvar_max); /* max_locals */
2982 UNSAFE_PUT4 (state->code_length);
2983
2984 /* Emit the exception table. */
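	  /* Each entry is four u2 values: start_pc, end_pc, handler_pc
	     and catch_type, where catch_type 0 denotes a catch-all
	     handler (used for finally/cleanup blocks).  */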
2985 ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
2986 PUT2 (state->num_handlers); /* exception_table_length */
2987 handler = state->handlers;
2988 for (; handler != NULL; handler = handler->next)
2989 {
2990 int type_index;
2991 PUT2 (handler->start_label->pc);
2992 PUT2 (handler->end_label->pc);
2993 PUT2 (handler->handler_label->pc);
2994 if (handler->type == NULL_TREE)
2995 type_index = 0;
2996 else
2997 type_index = find_class_constant (&state->cpool,
2998 handler->type);
2999 PUT2 (type_index);
3000 }
3001
3002 ptr = append_chunk (NULL, 2, state);
3003 PUT2 (code_attributes_count);
3004
3005 /* Write the LineNumberTable attribute. */
3006 if (state->linenumber_count > 0)
3007 {
3008 static tree LineNumberTable_node = NULL_TREE;
3009 ptr = append_chunk (NULL,
3010 8 + 4 * state->linenumber_count, state);
3011 if (LineNumberTable_node == NULL_TREE)
3012 LineNumberTable_node = get_identifier ("LineNumberTable");
3013 i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3014 PUT2 (i); /* attribute_name_index */
3015 i = 2+4*state->linenumber_count; PUT4(i); /* attribute_length */
3016 i = state->linenumber_count; PUT2 (i);
3017 for (block = state->blocks; block != NULL; block = block->next)
3018 {
3019 int line = block->linenumber;
3020 if (line > 0)
3021 {
3022 PUT2 (block->pc);
3023 PUT2 (line);
3024 }
3025 }
3026 }
3027
3028 /* Write the LocalVariableTable attribute. */
3029 if (state->lvar_count > 0)
3030 {
3031 static tree LocalVariableTable_node = NULL_TREE;
3032 struct localvar_info *lvar = state->first_lvar;
3033 ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3034 if (LocalVariableTable_node == NULL_TREE)
3035 LocalVariableTable_node = get_identifier("LocalVariableTable");
3036 i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3037 PUT2 (i); /* attribute_name_index */
3038 i = 2 + 10 * state->lvar_count; PUT4 (i); /* attribute_length */
3039 i = state->lvar_count; PUT2 (i);
3040 for ( ; lvar != NULL; lvar = lvar->next)
3041 {
3042 tree name = DECL_NAME (lvar->decl);
3043 tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3044 i = lvar->start_label->pc; PUT2 (i);
3045 i = lvar->end_label->pc - i; PUT2 (i);
3046 i = find_utf8_constant (&state->cpool, name); PUT2 (i);
3047 i = find_utf8_constant (&state->cpool, sig); PUT2 (i);
3048 i = DECL_LOCAL_INDEX (lvar->decl); PUT2 (i);
3049 }
3050 }
3051 }
3052 if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3053 {
3054 tree t = DECL_FUNCTION_THROWS (part);
3055 int throws_count = list_length (t);
3056 static tree Exceptions_node = NULL_TREE;
3057 if (Exceptions_node == NULL_TREE)
3058 Exceptions_node = get_identifier ("Exceptions");
3059 ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3060 i = find_utf8_constant (&state->cpool, Exceptions_node);
3061 PUT2 (i); /* attribute_name_index */
3062 i = 2 + 2 * throws_count; PUT4(i); /* attribute_length */
3063 i = throws_count; PUT2 (i);
3064 for (; t != NULL_TREE; t = TREE_CHAIN (t))
3065 {
3066 i = find_class_constant (&state->cpool, TREE_VALUE (t));
3067 PUT2 (i);
3068 }
3069 }
3070 methods_count++;
3071 current_function_decl = save_function;
3072 }
3073 ptr = methods_count_ptr; UNSAFE_PUT2 (methods_count);
3074
3075 source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3076 for (s = source_file; ; s++)
3077 {
3078 char ch = *s;
3079 if (ch == '\0')
3080 break;
3081 if (ch == '/' || ch == '\\')
3082 source_file = s+1;
3083 }
3084 ptr = append_chunk (NULL, 10, state);
3085
3086 i = ((INNER_CLASS_TYPE_P (clas)
3087 || DECL_INNER_CLASS_LIST (TYPE_NAME (clas))) ? 2 : 1);
3088 PUT2 (i); /* attributes_count */
3089
3090   /* Generate the SourceFile attribute. */
3091 if (SourceFile_node == NULL_TREE)
3092 SourceFile_node = get_identifier ("SourceFile");
3093 i = find_utf8_constant (&state->cpool, SourceFile_node);
3094 PUT2 (i); /* attribute_name_index */
3095 PUT4 (2);
3096 i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3097 PUT2 (i);
3098 append_innerclasses_attribute (state, clas);
3099
3100   /* Now finally generate the contents of the constant pool chunk. */
3101 i = count_constant_pool_bytes (&state->cpool);
3102 ptr = obstack_alloc (state->chunk_obstack, i);
3103 cpool_chunk->data = ptr;
3104 cpool_chunk->size = i;
3105 write_constant_pool (&state->cpool, ptr, i);
3106 return state->first;
3107 }
3108
3109 static unsigned char *
3110 append_synthetic_attribute (state)
3111 struct jcf_partial *state;
3112 {
3113 static tree Synthetic_node = NULL_TREE;
3114 unsigned char *ptr = append_chunk (NULL, 6, state);
3115 int i;
3116
3117 if (Synthetic_node == NULL_TREE)
3118 Synthetic_node = get_identifier ("Synthetic");
3119 i = find_utf8_constant (&state->cpool, Synthetic_node);
3120 PUT2 (i); /* Attribute string index */
3121 PUT4 (0); /* Attribute length */
3122
3123 return ptr;
3124 }
3125
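/* Append an InnerClasses attribute for CLASS to STATE: the attribute
   name index, a length and an entry count (both patched afterwards),
   and one 8-byte entry for each inner class visible from CLASS: CLASS
   itself and its enclosing classes when CLASS is inner, plus the inner
   classes declared directly in CLASS.  Nothing is emitted for a
   top-level class without inner classes.  */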
3126 static void
3127 append_innerclasses_attribute (state, class)
3128 struct jcf_partial *state;
3129 tree class;
3130 {
3131 static tree InnerClasses_node = NULL_TREE;
3132 tree orig_decl = TYPE_NAME (class);
3133 tree current, decl;
3134 int length = 0, i;
3135 unsigned char *ptr, *length_marker, *number_marker;
3136
3137 if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3138 return;
3139
3140 ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3141
3142 if (InnerClasses_node == NULL_TREE)
3143 InnerClasses_node = get_identifier ("InnerClasses");
3144 i = find_utf8_constant (&state->cpool, InnerClasses_node);
3145 PUT2 (i);
3146 length_marker = ptr; PUT4 (0); /* length, to be later patched */
3147   number_marker = ptr; PUT2 (0); /* number_of_classes, to be later patched */
3148
3149   /* Generate the entries: all inner classes visible from the class we
3150      process: the class itself, its enclosing classes, and its declared inner classes. */
3151 while (class && INNER_CLASS_TYPE_P (class))
3152 {
3153 char *n;
3154
3155 decl = TYPE_NAME (class);
3156 n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3157 IDENTIFIER_LENGTH (DECL_NAME (decl));
3158
3159 while (n[-1] != '$')
3160 n--;
3161 append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3162 length++;
3163
3164 class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3165 }
3166
3167 decl = orig_decl;
3168 for (current = DECL_INNER_CLASS_LIST (decl);
3169 current; current = TREE_CHAIN (current))
3170 {
3171 append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3172 TREE_VALUE (current));
3173 length++;
3174 }
3175
3176 ptr = length_marker; PUT4 (8*length+2);
3177 ptr = number_marker; PUT2 (length);
3178 }
3179
3180 static void
3181 append_innerclasses_attribute_entry (state, decl, name)
3182 struct jcf_partial *state;
3183 tree decl, name;
3184 {
3185 static tree anonymous_name = NULL_TREE;
3186 int icii, ocii, ini, icaf;
3187 unsigned char *ptr = append_chunk (NULL, 8, state);
3188
3189 if (!anonymous_name)
3190 anonymous_name = get_identifier ("");
3191
3192 icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3193 ocii = find_class_constant (&state->cpool, TREE_TYPE (DECL_CONTEXT (decl)));
3194
3195   /* The specification says that if the class is anonymous,
3196      inner_name_index must be zero.  But the implementation makes it
3197      point to an empty string. */
3198 ini = find_utf8_constant (&state->cpool,
3199 (ANONYMOUS_CLASS_P (TREE_TYPE (decl)) ?
3200 anonymous_name : name));
3201 icaf = get_access_flags (decl);
3202
3203 PUT2 (icii); PUT2 (ocii); PUT2 (ini); PUT2 (icaf);
3204 }
3205
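/* Return a freshly allocated file name for writing CLAS's bytecode.
   The package-qualified class name is converted by replacing each `.'
   with DIR_SEPARATOR and appending ".class"; e.g. a class foo.bar.Baz
   (an arbitrary example) with a `-d' base directory of /classes would
   be written to /classes/foo/bar/Baz.class, creating missing
   subdirectories as needed.  With no base directory the file is placed
   next to the corresponding .java source file.  */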
3206 static char *
3207 make_class_file_name (clas)
3208 tree clas;
3209 {
3210 const char *dname, *slash;
3211 char *cname, *r;
3212 struct stat sb;
3213
3214 cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3215 "", '.', DIR_SEPARATOR,
3216 ".class"));
3217 if (jcf_write_base_directory == NULL)
3218 {
3219 /* Make sure we put the class file into the .java file's
3220 directory, and not into some subdirectory thereof. */
3221 char *t;
3222 dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3223 slash = strrchr (dname, DIR_SEPARATOR);
3224 if (! slash)
3225 {
3226 dname = ".";
3227 slash = dname + 1;
3228 }
3229 t = strrchr (cname, DIR_SEPARATOR);
3230 if (t)
3231 cname = t + 1;
3232 }
3233 else
3234 {
3235 dname = jcf_write_base_directory;
3236 slash = dname + strlen (dname);
3237 }
3238
3239 r = xmalloc (slash - dname + strlen (cname) + 2);
3240 strncpy (r, dname, slash - dname);
3241 r[slash - dname] = DIR_SEPARATOR;
3242 strcpy (&r[slash - dname + 1], cname);
3243
3244 /* We try to make new directories when we need them. We only do
3245 this for directories which "might not" exist. For instance, we
3246 assume the `-d' directory exists, but we don't assume that any
3247 subdirectory below it exists. It might be worthwhile to keep
3248 track of which directories we've created to avoid gratuitous
3249 stat()s. */
3250 dname = r + (slash - dname) + 1;
3251 while (1)
3252 {
3253 cname = strchr (dname, DIR_SEPARATOR);
3254 if (cname == NULL)
3255 break;
3256 *cname = '\0';
3257 if (stat (r, &sb) == -1)
3258 {
3259 /* Try to make it. */
3260 if (mkdir (r, 0755) == -1)
3261 {
3262 fatal ("failed to create directory `%s'", r);
3263 free (r);
3264 return NULL;
3265 }
3266 }
3267 *cname = DIR_SEPARATOR;
3268 /* Skip consecutive separators. */
3269 for (dname = cname + 1; *dname && *dname == DIR_SEPARATOR; ++dname)
3270 ;
3271 }
3272
3273 return r;
3274 }
3275
3276 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3277 The output .class file name is make_class_file_name(CLAS). */
3278
3279 void
3280 write_classfile (clas)
3281 tree clas;
3282 {
3283 struct obstack *work = &temporary_obstack;
3284 struct jcf_partial state[1];
3285 char *class_file_name = make_class_file_name (clas);
3286 struct chunk *chunks;
3287
3288 if (class_file_name != NULL)
3289 {
3290 FILE* stream = fopen (class_file_name, "wb");
3291 if (stream == NULL)
3292 fatal ("failed to open `%s' for writing", class_file_name);
3293 jcf_dependency_add_target (class_file_name);
3294 init_jcf_state (state, work);
3295 chunks = generate_classfile (clas, state);
3296 write_chunks (stream, chunks);
3297 if (fclose (stream))
3298 fatal ("failed to close after writing `%s'", class_file_name);
3299 free (class_file_name);
3300 }
3301 release_jcf_state (state);
3302 }
3303
3304 /* TODO:
3305 string concatenation
3306 synchronized statement
3307 */