configure.ac (--enable-libgcj-multifile): Remove.
[gcc.git] / libjava / boehm.cc
1 // boehm.cc - interface between libjava and Boehm GC.
2
3 /* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
4 Free Software Foundation
5
6 This file is part of libgcj.
7
8 This software is copyrighted work licensed under the terms of the
9 Libgcj License. Please consult the file "LIBGCJ_LICENSE" for
10 details. */
11
12 #include <config.h>
13
14 #include <stdio.h>
15 #include <limits.h>
16
17 #include <jvm.h>
18 #include <gcj/cni.h>
19
20 #include <java/lang/Class.h>
21 #include <java/lang/reflect/Modifier.h>
22 #include <java-interp.h>
23
24 // More nastiness: the GC wants to define TRUE and FALSE. We don't
25 // need the Java definitions (themselves a hack), so we undefine them.
26 #undef TRUE
27 #undef FALSE
28
29 extern "C"
30 {
31 #include <gc_config.h>
32
33 // Set GC_DEBUG before including gc.h!
34 #ifdef LIBGCJ_GC_DEBUG
35 # define GC_DEBUG
36 #endif
37
38 #include <gc_mark.h>
39 #include <gc_gcj.h>
40 #include <javaxfc.h> // GC_finalize_all declaration.
41
42 #ifdef THREAD_LOCAL_ALLOC
43 # define GC_REDIRECT_TO_LOCAL
44 # include <gc_local_alloc.h>
45 #endif
46
47 // From boehm's misc.c
48 void GC_enable();
49 void GC_disable();
50 };
51
// Push Obj onto the collector's mark stack if it looks like a heap
// pointer, updating Top.  Source is the address the pointer was found
// at (used by the debugging collector for back-reference tracking).
#define MAYBE_MARK(Obj, Top, Limit, Source) \
  Top=GC_MARK_AND_PUSH((GC_PTR) Obj, Top, Limit, (GC_PTR *) Source)

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static void **array_free_list;
60
61 \f
62
// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
//
// ADDR is the object being scanned; MSP and MSL are the collector's
// mark stack pointer and limit (advanced via MAYBE_MARK); ENV is the
// environment word from the mark descriptor -- 1 means the object was
// allocated by the debug allocator and ADDR points at the debug base,
// not the user object.  Returns the updated mark stack pointer.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *)msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *)msl;

  if (env == (void *)1) /* Object allocated with debug allocator. */
    addr = (GC_PTR)GC_USR_PTR_FROM_BASE(addr);
  jobject obj = (jobject) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  GC_PTR p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
# endif
  // Mark the object's class.
  p = (GC_PTR) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class objects
      // as pointerfree memory, and then mark it more intelligently here.
      // We ensure that the ClassClass mark descriptor forces invocation of
      // this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.  For the incremental
      // collector, we need to make sure that the class object is written whenever
      // any of the subobjects are altered and may need rescanning.  This may be tricky
      // during construction, and this may not be the right way to do this with
      // incremental collection.
      // If we overflow the mark stack, we will rescan the class object, so we should
      // be OK.  The same applies if we redo the mark phase because win32 unmapped part
      // of our root set.  - HB
      jclass c = (jclass) addr;

      p = (GC_PTR) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      // Mark every constant-pool entry as a potential pointer.
      for (int i = 0; i < c->constants.size; ++i)
	{
	  /* FIXME: We could make this more precise by using the tags -KKT */
	  p = (GC_PTR) c->constants.data[i].p;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	}

#ifdef INTERPRETER
      // For interpreted classes the constant pool arrays themselves
      // are heap-allocated and must be kept alive too.
      if (_Jv_IsInterpretedClass (c))
	{
	  p = (GC_PTR) c->constants.tags;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	  p = (GC_PTR) c->constants.data;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	}
#endif

      // The vtable might be allocated even for compiled code.
      p = (GC_PTR) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (GC_PTR) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
	return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
	{
	  // Scan each method in the cases where `methods' really
	  // points to a methods structure.
	  for (int i = 0; i < c->method_count; ++i)
	    {
	      p = (GC_PTR) c->methods[i].name;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	      p = (GC_PTR) c->methods[i].signature;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

	      // Note that we don't have to mark each individual throw
	      // separately, as these are stored in the constant pool.
	      p = (GC_PTR) c->methods[i].throws;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	    }
	}

      // Mark all the fields.
      p = (GC_PTR) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      for (int i = 0; i < c->field_count; ++i)
	{
	  _Jv_Field* field = &c->fields[i];

	  p = (GC_PTR) field->name;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	  p = (GC_PTR) field->type;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

	  // For the interpreter, we also need to mark the memory
	  // containing static members
	  if ((field->flags & java::lang::reflect::Modifier::STATIC))
	    {
	      p = (GC_PTR) field->u.addr;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

	      // also, if the static member is a reference,
	      // mark also the value pointed to.  We check for isResolved
	      // since marking can happen before memory is allocated for
	      // static members.
	      // Note that field->u.addr may be null if the class c is
	      // JV_STATE_LOADED but not JV_STATE_PREPARED (initialized).
	      if (JvFieldIsRef (field) && p && field->isResolved())
		{
		  jobject val = *(jobject*) p;
		  p = (GC_PTR) val;
		  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
		}
	    }
	}

      p = (GC_PTR) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      for (int i = 0; i < c->interface_count; ++i)
	{
	  p = (GC_PTR) c->interfaces[i];
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
	}
      p = (GC_PTR) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

      // The dispatch tables can be allocated at runtime.
      p = (GC_PTR) c->ancestors;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      if (c->idt)
	{
	  p = (GC_PTR) c->idt;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

	  if (c->isInterface())
	    {
	      p = (GC_PTR) c->idt->iface.ioffsets;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c->idt);
	    }
	  else if (! c->isPrimitive())
	    {
	      // This field is only valid for ordinary classes.
	      p = (GC_PTR) c->idt->cls.itable;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c->idt);
	    }
	}

      p = (GC_PTR) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->hack_signers;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);
      p = (GC_PTR) c->aux_info;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c) && c->aux_info)
	{
	  _Jv_InterpClass* ic = (_Jv_InterpClass*) c->aux_info;

	  p = (GC_PTR) ic->interpreted_methods;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

	  p = (GC_PTR) ic->source_file_name;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

	  for (int i = 0; i < c->method_count; i++)
	    {
	      // The interpreter installs a heap-allocated trampoline
	      // here, so we'll mark it.
	      p = (GC_PTR) c->methods[i].ncode;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c);

	      using namespace java::lang::reflect;

	      // Mark the direct-threaded code.  Note a subtlety here:
	      // when we add Miranda methods to a class, we don't
	      // resize its interpreted_methods array.  If we try to
	      // reference one of these methods, we may crash.
	      // However, we know these are all abstract, and we know
	      // that abstract methods have nothing useful in this
	      // array.  So, we skip all abstract methods to avoid the
	      // problem.  FIXME: this is pretty obscure, it may be
	      // better to add a methods to the execution engine and
	      // resize the array.
	      if ((c->methods[i].accflags & Modifier::ABSTRACT) != 0)
		continue;

	      p = (GC_PTR) ic->interpreted_methods[i];
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

	      if ((c->methods[i].accflags & Modifier::NATIVE) != 0)
		{
		  _Jv_JNIMethod *jm
		    = (_Jv_JNIMethod *) ic->interpreted_methods[i];
		  if (jm)
		    {
		      p = (GC_PTR) jm->jni_arg_types;
		      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, p);
		    }
		}
	      else
		{
		  _Jv_InterpMethod *im
		    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
		  if (im)
		    {
		      p = (GC_PTR) im->line_table;
		      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);
		      p = (GC_PTR) im->prepared;
		      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);
		    }
		}
	    }

	  p = (GC_PTR) ic->field_initializers;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic);

	}
#endif

    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
	{
	  jfieldID field = JvGetFirstInstanceField (klass);
	  jint max = JvNumInstanceFields (klass);

	  for (int i = 0; i < max; ++i)
	    {
	      if (JvFieldIsRef (field))
		{
		  jobject val = JvGetObjectField (obj, field);
		  p = (GC_PTR) val;
		  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj);
		}
	      field = field->getNextField ();
	    }
	  klass = klass->getSuperclass();
	}
    }

  return mark_stack_ptr;
}
343
// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
//
// Arguments mirror _Jv_MarkObj: ADDR is the array, MSP/MSL the mark
// stack pointer and limit, and ENV == 1 means ADDR is a debug-allocator
// base pointer.  Returns the updated mark stack pointer.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void *env)
{
  struct GC_ms_entry *mark_stack_ptr = (struct GC_ms_entry *)msp;
  struct GC_ms_entry *mark_stack_limit = (struct GC_ms_entry *)msl;

  if (env == (void *)1) /* Object allocated with debug allocator. */
    addr = (void *)GC_USR_PTR_FROM_BASE(addr);
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  GC_PTR p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (GC_PTR) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
# endif
  // Mark the object's class.
  p = (GC_PTR) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas));

  // Mark each element reference in turn.
  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (GC_PTR) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array);
    }

  return mark_stack_ptr;
}
384
385 // Generate a GC marking descriptor for a class.
386 //
387 // We assume that the gcj mark proc has index 0. This is a dubious assumption,
388 // since another one could be registered first. But the compiler also
389 // knows this, so in that case everything else will break, too.
390 #define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
391
392 void *
393 _Jv_BuildGCDescr(jclass self)
394 {
395 jlong desc = 0;
396 jint bits_per_word = CHAR_BIT * sizeof (void *);
397
398 // Note: for now we only consider a bitmap mark descriptor. We
399 // could also handle the case where the first N fields of a type are
400 // references. However, this is not very likely to be used by many
401 // classes, and it is easier to compute things this way.
402
403 // The vtable pointer.
404 desc |= 1ULL << (bits_per_word - 1);
405 #ifndef JV_HASH_SYNCHRONIZATION
406 // The sync_info field.
407 desc |= 1ULL << (bits_per_word - 2);
408 #endif
409
410 for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
411 {
412 jfieldID field = JvGetFirstInstanceField(klass);
413 int count = JvNumInstanceFields(klass);
414
415 for (int i = 0; i < count; ++i)
416 {
417 if (field->isRef())
418 {
419 unsigned int off = field->getOffset();
420 // If we run into a weird situation, we bail.
421 if (off % sizeof (void *) != 0)
422 return (void *) (GCJ_DEFAULT_DESCR);
423 off /= sizeof (void *);
424 // If we find a field outside the range of our bitmap,
425 // fall back to procedure marker. The bottom 2 bits are
426 // reserved.
427 if (off >= (unsigned) bits_per_word - 2)
428 return (void *) (GCJ_DEFAULT_DESCR);
429 desc |= 1ULL << (bits_per_word - off - 1);
430 }
431
432 field = field->getNextField();
433 }
434 }
435
436 // For bitmap mark type, bottom bits are 01.
437 desc |= 1;
438 // Bogus warning avoidance (on many platforms).
439 return (void *) (unsigned long) desc;
440 }
441
442 // Allocate some space that is known to be pointer-free.
443 void *
444 _Jv_AllocBytes (jsize size)
445 {
446 void *r = GC_MALLOC_ATOMIC (size);
447 // We have to explicitly zero memory here, as the GC doesn't
448 // guarantee that PTRFREE allocations are zeroed. Note that we
449 // don't have to do this for other allocation types because we set
450 // the `ok_init' flag in the type descriptor.
451 memset (r, 0, size);
452 return r;
453 }
454
455 #ifdef LIBGCJ_GC_DEBUG
456
// Debug-build allocator for an ordinary (scanned) Java object of
// KLASS; GC_GCJ_MALLOC installs the vtable-based mark descriptor.
void *
_Jv_AllocObj (jsize size, jclass klass)
{
  return GC_GCJ_MALLOC (size, klass->vtable);
}
462
// Debug-build allocator for a Java object whose body contains no
// pointers (beyond the header words).
void *
_Jv_AllocPtrFreeObj (jsize size, jclass klass)
{
#ifdef JV_HASH_SYNCHRONIZATION
  // No sync_info pointer in the object, so it really is pointer-free:
  // use atomic allocation and install the vtable by hand.
  void * obj = GC_MALLOC_ATOMIC(size);
  *((_Jv_VTable **) obj) = klass->vtable;
#else
  // With an in-object sync_info pointer the object must be scanned,
  // so fall back to the ordinary gcj allocation.
  void * obj = GC_GCJ_MALLOC(size, klass->vtable);
#endif
  return obj;
}
474
475 #endif /* LIBGCJ_GC_DEBUG */
476 // In the non-debug case, the above two functions are defined
477 // as inline functions in boehm-gc.h. In the debug case we
478 // really want to take advantage of the definitions in gc_gcj.h.
479
// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;

#ifdef LIBGCJ_GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    // Large arrays use the special array `kind' (array_kind_x) so
    // _Jv_MarkArray scans only the element slots.
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  // Install the vtable pointer in the first word.
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}
508
509 /* Allocate space for a new non-Java object, which does not have the usual
510 Java object header but may contain pointers to other GC'ed objects. */
511 void *
512 _Jv_AllocRawObj (jsize size)
513 {
514 return (void *) GC_MALLOC (size);
515 }
516
517 static void
518 call_finalizer (GC_PTR obj, GC_PTR client_data)
519 {
520 _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
521 jobject jobj = (jobject) obj;
522
523 (*fn) (jobj);
524 }
525
// Arrange for METH to run on OBJECT once the collector finds it
// unreachable.  "No order" finalization is used, so cycles of
// finalizable objects do not block each other.
void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
				  NULL, NULL);
}
532
// Run finalizers already enqueued for objects the collector has
// found unreachable.
void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

// Run the finalizer of every finalizable object, reachable or not
// (GC_finalize_all; presumably backs runFinalizersOnExit -- confirm
// with callers).
void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

// Force a full garbage collection now.
void
_Jv_RunGC (void)
{
  GC_gcollect ();
}
550
// Current size of the collector's heap, in bytes (presumably backs
// java.lang.Runtime.totalMemory -- confirm with callers).
long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

// Bytes of heap currently unused by the collector.
long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}
562
// Grow the heap to at least SIZE bytes.  The Boehm collector has no
// direct "initial size" setting, so expand by the difference from the
// current heap size; a SIZE at or below the current size is a no-op.
void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

// Cap the collector's heap at SIZE bytes.
void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}
576
// Temporarily inhibit collections (GC_disable, declared above from
// the collector's misc.c).
void
_Jv_DisableGC (void)
{
  GC_disable();
}

// Re-enable collections after a matching _Jv_DisableGC.
void
_Jv_EnableGC (void)
{
  GC_enable();
}
588
589 static void * handle_out_of_memory(size_t)
590 {
591 _Jv_ThrowNoMemory();
592 }
593
// Boehm "describe type" callback: write the Java class name of OBJ
// into OUT_BUF (GC_TYPE_DESCR_LEN bytes) so the collector can label
// objects in backtraces and leak reports.
static void
gcj_describe_type_fn(void *obj, char *out_buf)
{
  _Jv_VTable *dt = *(_Jv_VTable **) obj;

  if (! dt /* Shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jclass klass = dt->clas;
  if (!klass /* shouldn't happen */)
    {
      strcpy(out_buf, "GCJ (bad)");
      return;
    }
  jstring name = klass -> getName();
  // Truncate to leave room for the NUL terminator.
  size_t len = name -> length();
  if (len >= GC_TYPE_DESCR_LEN) len = GC_TYPE_DESCR_LEN - 1;
  // NOTE(review): `len' counts string characters, but the UTF output
  // of JvGetStringUTFRegion can occupy more than one byte per
  // character for non-ASCII names -- looks like this could write past
  // out_buf; confirm buffer sizing / encoding against the callers.
  JvGetStringUTFRegion (name, 0, len, out_buf);
  out_buf[len] = '\0';
}
616
// One-time collector setup, run during VM startup: configures pointer
// recognition, the gcj mark descriptors, OOM behavior, Java
// finalization, the object-array `kind', and backtrace labeling.
void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  // We always use mark proc descriptor 0, since the compiler knows
  // about it.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  // Enable the collector's Java-style finalization support.
  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation and
  // marking.
  array_free_list = GC_new_free_list();
  proc = GC_new_proc((GC_mark_proc)_Jv_MarkArray);
  array_kind_x = GC_new_kind(array_free_list, GC_MAKE_PROC (proc, 0), 0, 1);

  // Arrange to have the GC print Java class names in backtraces, etc.
  GC_register_describe_type_fn(GC_gcj_kind, gcj_describe_type_fn);
  GC_register_describe_type_fn(GC_gcj_debug_kind, gcj_describe_type_fn);
}
648
649 #ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

// The descriptor slot holds a plain length (low bits zero), telling
// the collector to scan the first 2 words: vtable slot + one field.
static _Jv_VTable trace_one_vtable = {
  0,				// class pointer
  (void *)(2 * sizeof(void *)),
				// descriptor; scan 2 words incl. vtable ptr.
				// Least significant bits must be zero to
				// identify this as a length descriptor
  {0}				// First method
};

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}
668
// Ditto for two words: the fake vtable causes the first two fields
// (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0,				// class pointer
  (void *)(3 * sizeof(void *)),
				// descriptor; scan 3 words incl. vtable ptr.
  {0}				// First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}
686
687 #endif /* JV_HASH_SYNCHRONIZATION */
688
// Switch the collector to on-demand finalization: instead of running
// finalizers itself it calls NOTIFIER, which is expected to arrange
// for _Jv_RunFinalizers to be invoked (e.g. from a dedicated thread).
void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}
695
// Register *OBJP as a weak link: the collector clears it once the
// referenced object becomes otherwise unreachable.
void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  // This test helps to ensure that we meet a precondition of
  // GC_general_register_disappearing_link, viz. "Obj must be a
  // pointer to the first word of an object we allocated."
  if (GC_base(*objp))
    GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}
705
// Policy hook asking whether the soft reference's referent may be
// reclaimed now.
jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}