// gcc.git / libjava / boehm.cc
1 // boehm.cc - interface between libjava and Boehm GC.
2
3 /* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation
4
5 This file is part of libgcj.
6
7 This software is copyrighted work licensed under the terms of the
8 Libgcj License. Please consult the file "LIBGCJ_LICENSE" for
9 details. */
10
#include <config.h>

#include <stdio.h>
#include <limits.h>
#include <string.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>
22
23 // More nastiness: the GC wants to define TRUE and FALSE. We don't
24 // need the Java definitions (themselves a hack), so we undefine them.
25 #undef TRUE
26 #undef FALSE
27
28 extern "C"
29 {
30 #include <private/gc_pmark.h>
31 #include <gc_gcj.h>
32
33 #ifdef THREAD_LOCAL_ALLOC
34 # define GC_REDIRECT_TO_LOCAL
35 # include <gc_local_alloc.h>
36 #endif
37
38 // These aren't declared in any Boehm GC header.
39 void GC_finalize_all (void);
40 ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
41 };
42
43 #define MAYBE_MARK(Obj, Top, Limit, Source, Exit) \
44 Top=GC_MARK_AND_PUSH((GC_PTR)Obj, Top, Limit, (GC_PTR *)Source)
45
46 // `kind' index used when allocating Java arrays.
47 static int array_kind_x;
48
49 // Freelist used for Java arrays.
50 static ptr_t *array_free_list;
51
52 // Lock used to protect access to Boehm's GC_enable/GC_disable functions.
53 static _Jv_Mutex_t disable_gc_mutex;
54
55 \f
56
57 // This is called by the GC during the mark phase. It marks a Java
58 // object. We use `void *' arguments and return, and not what the
59 // Boehm GC wants, to avoid pollution in our headers.
60 void *
61 _Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
62 {
63 mse *mark_stack_ptr = (mse *) msp;
64 mse *mark_stack_limit = (mse *) msl;
65 jobject obj = (jobject) addr;
66
67 // FIXME: if env is 1, this object was allocated through the debug
68 // interface, and addr points to the beginning of the debug header.
69 // In that case, we should really add the size of the header to addr.
70
71 _Jv_VTable *dt = *(_Jv_VTable **) addr;
72 // The object might not yet have its vtable set, or it might
73 // really be an object on the freelist. In either case, the vtable slot
74 // will either be 0, or it will point to a cleared object.
75 // This assumes Java objects have size at least 3 words,
76 // including the header. But this should remain true, since this
77 // should only be used with debugging allocation or with large objects.
78 if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
79 return mark_stack_ptr;
80 jclass klass = dt->clas;
81 ptr_t p;
82
83 # ifndef JV_HASH_SYNCHRONIZATION
84 // Every object has a sync_info pointer.
85 p = (ptr_t) obj->sync_info;
86 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
87 # endif
88 // Mark the object's class.
89 p = (ptr_t) klass;
90 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);
91
92 if (__builtin_expect (klass == &java::lang::Class::class$, false))
93 {
94 // Currently we allocate some of the memory referenced from class objects
95 // as pointerfree memory, and then mark it more intelligently here.
96 // We ensure that the ClassClass mark descriptor forces invocation of
97 // this procedure.
98 // Correctness of this is subtle, but it looks OK to me for now. For the incremental
99 // collector, we need to make sure that the class object is written whenever
100 // any of the subobjects are altered and may need rescanning. This may be tricky
101 // during construction, and this may not be the right way to do this with
102 // incremental collection.
103 // If we overflow the mark stack, we will rescan the class object, so we should
104 // be OK. The same applies if we redo the mark phase because win32 unmapped part
105 // of our root set. - HB
106 jclass c = (jclass) addr;
107
108 p = (ptr_t) c->name;
109 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
110 p = (ptr_t) c->superclass;
111 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
112 for (int i = 0; i < c->constants.size; ++i)
113 {
114 /* FIXME: We could make this more precise by using the tags -KKT */
115 p = (ptr_t) c->constants.data[i].p;
116 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
117 }
118
119 #ifdef INTERPRETER
120 if (_Jv_IsInterpretedClass (c))
121 {
122 p = (ptr_t) c->constants.tags;
123 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
124 p = (ptr_t) c->constants.data;
125 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
126 p = (ptr_t) c->vtable;
127 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
128 }
129 #endif
130
131 // If the class is an array, then the methods field holds a
132 // pointer to the element class. If the class is primitive,
133 // then the methods field holds a pointer to the array class.
134 p = (ptr_t) c->methods;
135 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);
136
137 // The vtable might have been set, but the rest of the class
138 // could still be uninitialized. If this is the case, then
139 // c.isArray will SEGV. We check for this, and if it is the
140 // case we just return.
141 if (__builtin_expect (c->name == NULL, false))
142 return mark_stack_ptr;
143
144 if (! c->isArray() && ! c->isPrimitive())
145 {
146 // Scan each method in the cases where `methods' really
147 // points to a methods structure.
148 for (int i = 0; i < c->method_count; ++i)
149 {
150 p = (ptr_t) c->methods[i].name;
151 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
152 cm1label);
153 p = (ptr_t) c->methods[i].signature;
154 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
155 cm2label);
156 }
157 }
158
159 // Mark all the fields.
160 p = (ptr_t) c->fields;
161 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
162 for (int i = 0; i < c->field_count; ++i)
163 {
164 _Jv_Field* field = &c->fields[i];
165
166 #ifndef COMPACT_FIELDS
167 p = (ptr_t) field->name;
168 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
169 #endif
170 p = (ptr_t) field->type;
171 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);
172
173 // For the interpreter, we also need to mark the memory
174 // containing static members
175 if ((field->flags & java::lang::reflect::Modifier::STATIC))
176 {
177 p = (ptr_t) field->u.addr;
178 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);
179
180 // also, if the static member is a reference,
181 // mark also the value pointed to. We check for isResolved
182 // since marking can happen before memory is allocated for
183 // static members.
184 if (JvFieldIsRef (field) && field->isResolved())
185 {
186 jobject val = *(jobject*) field->u.addr;
187 p = (ptr_t) val;
188 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
189 c, c8elabel);
190 }
191 }
192 }
193
194 p = (ptr_t) c->vtable;
195 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
196 p = (ptr_t) c->interfaces;
197 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
198 for (int i = 0; i < c->interface_count; ++i)
199 {
200 p = (ptr_t) c->interfaces[i];
201 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
202 }
203 p = (ptr_t) c->loader;
204 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
205 p = (ptr_t) c->arrayclass;
206 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);
207 p = (ptr_t) c->protectionDomain;
208 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cPlabel);
209 p = (ptr_t) c->hack_signers;
210 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cSlabel);
211 p = (ptr_t) c->aux_info;
212 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cTlabel);
213
214 #ifdef INTERPRETER
215 if (_Jv_IsInterpretedClass (c))
216 {
217 _Jv_InterpClass* ic = (_Jv_InterpClass*) c->aux_info;
218
219 p = (ptr_t) ic->interpreted_methods;
220 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);
221
222 for (int i = 0; i < c->method_count; i++)
223 {
224 p = (ptr_t) ic->interpreted_methods[i];
225 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, \
226 cFlabel);
227
228 // Mark the direct-threaded code.
229 if ((c->methods[i].accflags
230 & java::lang::reflect::Modifier::NATIVE) == 0)
231 {
232 _Jv_InterpMethod *im
233 = (_Jv_InterpMethod *) ic->interpreted_methods[i];
234 if (im)
235 {
236 p = (ptr_t) im->prepared;
237 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, \
238 cFlabel);
239 }
240 }
241
242 // The interpreter installs a heap-allocated trampoline
243 // here, so we'll mark it.
244 p = (ptr_t) c->methods[i].ncode;
245 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
246 cm3label);
247 }
248
249 p = (ptr_t) ic->field_initializers;
250 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);
251
252 }
253 #endif
254
255 }
256 else
257 {
258 // NOTE: each class only holds information about the class
259 // itself. So we must do the marking for the entire inheritance
260 // tree in order to mark all fields. FIXME: what about
261 // interfaces? We skip Object here, because Object only has a
262 // sync_info, and we handled that earlier.
263 // Note: occasionally `klass' can be null. For instance, this
264 // can happen if a GC occurs between the point where an object
265 // is allocated and where the vtbl slot is set.
266 while (klass && klass != &java::lang::Object::class$)
267 {
268 jfieldID field = JvGetFirstInstanceField (klass);
269 jint max = JvNumInstanceFields (klass);
270
271 for (int i = 0; i < max; ++i)
272 {
273 if (JvFieldIsRef (field))
274 {
275 jobject val = JvGetObjectField (obj, field);
276 p = (ptr_t) val;
277 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
278 obj, elabel);
279 }
280 field = field->getNextField ();
281 }
282 klass = klass->getSuperclass();
283 }
284 }
285
286 return mark_stack_ptr;
287 }
288
289 // This is called by the GC during the mark phase. It marks a Java
290 // array (of objects). We use `void *' arguments and return, and not
291 // what the Boehm GC wants, to avoid pollution in our headers.
292 void *
293 _Jv_MarkArray (void *addr, void *msp, void *msl, void * /*env*/)
294 {
295 mse *mark_stack_ptr = (mse *) msp;
296 mse *mark_stack_limit = (mse *) msl;
297 jobjectArray array = (jobjectArray) addr;
298
299 _Jv_VTable *dt = *(_Jv_VTable **) addr;
300 // Assumes size >= 3 words. That's currently true since arrays have
301 // a vtable, sync pointer, and size. If the sync pointer goes away,
302 // we may need to round up the size.
303 if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
304 return mark_stack_ptr;
305 jclass klass = dt->clas;
306 ptr_t p;
307
308 # ifndef JV_HASH_SYNCHRONIZATION
309 // Every object has a sync_info pointer.
310 p = (ptr_t) array->sync_info;
311 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
312 # endif
313 // Mark the object's class.
314 p = (ptr_t) klass;
315 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);
316
317 for (int i = 0; i < JvGetArrayLength (array); ++i)
318 {
319 jobject obj = elements (array)[i];
320 p = (ptr_t) obj;
321 MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
322 }
323
324 return mark_stack_ptr;
325 }
326
327 // Generate a GC marking descriptor for a class.
328 //
329 // We assume that the gcj mark proc has index 0. This is a dubious assumption,
330 // since another one could be registered first. But the compiler also
331 // knows this, so in that case everything else will break, too.
332 #define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
333
334 void *
335 _Jv_BuildGCDescr(jclass self)
336 {
337 jlong desc = 0;
338 jint bits_per_word = CHAR_BIT * sizeof (void *);
339
340 // Note: for now we only consider a bitmap mark descriptor. We
341 // could also handle the case where the first N fields of a type are
342 // references. However, this is not very likely to be used by many
343 // classes, and it is easier to compute things this way.
344
345 // The vtable pointer.
346 desc |= 1ULL << (bits_per_word - 1);
347 #ifndef JV_HASH_SYNCHRONIZATION
348 // The sync_info field.
349 desc |= 1ULL << (bits_per_word - 2);
350 #endif
351
352 for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
353 {
354 jfieldID field = JvGetFirstInstanceField(klass);
355 int count = JvNumInstanceFields(klass);
356
357 for (int i = 0; i < count; ++i)
358 {
359 if (field->isRef())
360 {
361 unsigned int off = field->getOffset();
362 // If we run into a weird situation, we bail.
363 if (off % sizeof (void *) != 0)
364 return (void *) (GCJ_DEFAULT_DESCR);
365 off /= sizeof (void *);
366 // If we find a field outside the range of our bitmap,
367 // fall back to procedure marker. The bottom 2 bits are
368 // reserved.
369 if (off >= bits_per_word - 2)
370 return (void *) (GCJ_DEFAULT_DESCR);
371 desc |= 1ULL << (bits_per_word - off - 1);
372 }
373
374 field = field->getNextField();
375 }
376 }
377
378 // For bitmap mark type, bottom bits are 01.
379 desc |= 1;
380 // Bogus warning avoidance (on many platforms).
381 return (void *) (unsigned long) desc;
382 }
383
384 // Allocate some space that is known to be pointer-free.
385 void *
386 _Jv_AllocBytes (jsize size)
387 {
388 void *r = GC_MALLOC_ATOMIC (size);
389 // We have to explicitly zero memory here, as the GC doesn't
390 // guarantee that PTRFREE allocations are zeroed. Note that we
391 // don't have to do this for other allocation types because we set
392 // the `ok_init' flag in the type descriptor.
393 memset (r, 0, size);
394 return r;
395 }
396
397 // Allocate space for a new Java array.
398 // Used only for arrays of objects.
399 void *
400 _Jv_AllocArray (jsize size, jclass klass)
401 {
402 void *obj;
403 const jsize min_heap_addr = 16*1024;
404 // A heuristic. If size is less than this value, the size
405 // stored in the array can't possibly be misinterpreted as
406 // a pointer. Thus we lose nothing by scanning the object
407 // completely conservatively, since no misidentification can
408 // take place.
409
410 #ifdef GC_DEBUG
411 // There isn't much to lose by scanning this conservatively.
412 // If we didn't, the mark proc would have to understand that
413 // it needed to skip the header.
414 obj = GC_MALLOC(size);
415 #else
416 if (size < min_heap_addr)
417 obj = GC_MALLOC(size);
418 else
419 obj = GC_generic_malloc (size, array_kind_x);
420 #endif
421 *((_Jv_VTable **) obj) = klass->vtable;
422 return obj;
423 }
424
425 /* Allocate space for a new non-Java object, which does not have the usual
426 Java object header but may contain pointers to other GC'ed objects. */
427 void *
428 _Jv_AllocRawObj (jsize size)
429 {
430 return (void *) GC_MALLOC (size);
431 }
432
433 static void
434 call_finalizer (GC_PTR obj, GC_PTR client_data)
435 {
436 _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
437 jobject jobj = (jobject) obj;
438
439 (*fn) (jobj);
440 }
441
442 void
443 _Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
444 {
445 GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
446 NULL, NULL);
447 }
448
449 void
450 _Jv_RunFinalizers (void)
451 {
452 GC_invoke_finalizers ();
453 }
454
455 void
456 _Jv_RunAllFinalizers (void)
457 {
458 GC_finalize_all ();
459 }
460
461 void
462 _Jv_RunGC (void)
463 {
464 GC_gcollect ();
465 }
466
467 long
468 _Jv_GCTotalMemory (void)
469 {
470 return GC_get_heap_size ();
471 }
472
473 long
474 _Jv_GCFreeMemory (void)
475 {
476 return GC_get_free_bytes ();
477 }
478
479 void
480 _Jv_GCSetInitialHeapSize (size_t size)
481 {
482 size_t current = GC_get_heap_size ();
483 if (size > current)
484 GC_expand_hp (size - current);
485 }
486
487 void
488 _Jv_GCSetMaximumHeapSize (size_t size)
489 {
490 GC_set_max_heap_size ((GC_word) size);
491 }
492
493 // From boehm's misc.c
494 extern "C" void GC_enable();
495 extern "C" void GC_disable();
496
497 void
498 _Jv_DisableGC (void)
499 {
500 _Jv_MutexLock (&disable_gc_mutex);
501 GC_disable();
502 _Jv_MutexUnlock (&disable_gc_mutex);
503 }
504
505 void
506 _Jv_EnableGC (void)
507 {
508 _Jv_MutexLock (&disable_gc_mutex);
509 GC_enable();
510 _Jv_MutexUnlock (&disable_gc_mutex);
511 }
512
513 static void * handle_out_of_memory(size_t)
514 {
515 _Jv_ThrowNoMemory();
516 }
517
518 void
519 _Jv_InitGC (void)
520 {
521 int proc;
522
523 // Ignore pointers that do not point to the start of an object.
524 GC_all_interior_pointers = 0;
525
526 // Configure the collector to use the bitmap marking descriptors that we
527 // stash in the class vtable.
528 GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);
529
530 // Cause an out of memory error to be thrown from the allocators,
531 // instead of returning 0. This is cheaper than checking on allocation.
532 GC_oom_fn = handle_out_of_memory;
533
534 GC_java_finalization = 1;
535
536 // We use a different mark procedure for object arrays. This code
537 // configures a different object `kind' for object array allocation and
538 // marking. FIXME: see above.
539 array_free_list = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1)
540 * sizeof (ptr_t),
541 PTRFREE);
542 memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));
543
544 proc = GC_n_mark_procs++;
545 GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;
546
547 array_kind_x = GC_n_kinds++;
548 GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
549 GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
550 GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
551 GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
552 GC_obj_kinds[array_kind_x].ok_init = TRUE;
553
554 _Jv_MutexInit (&disable_gc_mutex);
555 }
556
557 #ifdef JV_HASH_SYNCHRONIZATION
558 // Allocate an object with a fake vtable pointer, which causes only
559 // the first field (beyond the fake vtable pointer) to be traced.
560 // Eventually this should probably be generalized.
561
562 static _Jv_VTable trace_one_vtable = {
563 0, // class pointer
564 (void *)(2 * sizeof(void *)),
565 // descriptor; scan 2 words incl. vtable ptr.
566 // Least significant bits must be zero to
567 // identify this as a length descriptor
568 {0} // First method
569 };
570
571 void *
572 _Jv_AllocTraceOne (jsize size /* includes vtable slot */)
573 {
574 return GC_GCJ_MALLOC (size, &trace_one_vtable);
575 }
576
577 // Ditto for two words.
578 // the first field (beyond the fake vtable pointer) to be traced.
579 // Eventually this should probably be generalized.
580
581 static _Jv_VTable trace_two_vtable =
582 {
583 0, // class pointer
584 (void *)(3 * sizeof(void *)),
585 // descriptor; scan 3 words incl. vtable ptr.
586 {0} // First method
587 };
588
589 void *
590 _Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
591 {
592 return GC_GCJ_MALLOC (size, &trace_two_vtable);
593 }
594
595 #endif /* JV_HASH_SYNCHRONIZATION */
596
597 void
598 _Jv_GCInitializeFinalizers (void (*notifier) (void))
599 {
600 GC_finalize_on_demand = 1;
601 GC_finalizer_notifier = notifier;
602 }
603
604 void
605 _Jv_GCRegisterDisappearingLink (jobject *objp)
606 {
607 GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
608 }
609
610 jboolean
611 _Jv_GCCanReclaimSoftReference (jobject)
612 {
613 // For now, always reclaim soft references. FIXME.
614 return true;
615 }