+2004-09-02 Jan Hubicka <jh@suse.cz>
+
+ * gimplify.c (gimplify_compound_lval): Move "stack" varray out of
+ GGC.
+
+ * ggc-page.c: Include tree-flow.h.
+ (extra_order_size_table): Add stmt_ann_d.
+ (STAT_LABEL): Rename from ....
+ (LABEL): ... this one.
+ * Makefile.in (ggc-page.o): Add dependency.
+
+ * ggc-common.c (ggc_force_collect): New global variable.
+ (loc_descriptor): Add fields "freed" and "collected".
+ (ptr_hash): New static hashtable.
+ (ptr_hash_entry): New structure.
+ (hash_ptr, eq_ptr, ggc_prune_ptr): New static functions.
+ (ggc_record_overhead): Take ptr argument; record it.
+ (ggc_prune_overhead_list, ggc_free_overhead): New functions.
+ (cmp_statistic): Improve sorting.
+ (dump_ggc_loc_statistics): Output newly collected statistics.
+ * ggc-page.c (ggc_alloc): Update call to ggc_record_overhead.
+ (ggc_free): Call ggc_free_overhead.
+ (ggc_collect): Force collection when ggc_force_collect is set;
+ call ggc_prune_overhead_list.
+ * ggc.h (ggc_force_collect): Declare.
+ (ggc_record_overhead): Update prototype.
+ (ggc_free_overhead, ggc_prune_overhead_list): Declare.
+
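The new bookkeeping gives each call-site descriptor two counters in addition
to "allocated" and "overhead": "collected" is charged when the post-mark
prune finds a recorded pointer unmarked (the "Garbage" column of the dump),
"freed" when ggc_free releases it explicitly, and the "Leak" column is the
remainder, allocated + overhead - freed - collected.  Below is a minimal
standalone model of that lifecycle; it is only a sketch, with a flat table
and illustrative names in place of the real libiberty hashtable keyed on the
allocation address.

    #include <stdio.h>
    #include <stddef.h>

    struct loc_stats { size_t allocated, overhead, freed, collected; };
    struct ptr_entry { void *ptr; struct loc_stats *loc; size_t size; int live; };

    static struct ptr_entry table[16];
    static int n_entries;

    /* Model of ggc_record_overhead: remember PTR, charge its call site.  */
    static void
    record_overhead (struct loc_stats *loc, void *ptr, size_t alloc, size_t ovh)
    {
      struct ptr_entry *e = &table[n_entries++];
      e->ptr = ptr;
      e->loc = loc;
      e->size = alloc + ovh;
      e->live = 1;
      loc->allocated += alloc;
      loc->overhead += ovh;
    }

    /* Model of ggc_free_overhead: PTR was explicitly freed.  */
    static void
    free_overhead (void *ptr)
    {
      int i;
      for (i = 0; i < n_entries; i++)
        if (table[i].live && table[i].ptr == ptr)
          {
            table[i].loc->freed += table[i].size;
            table[i].live = 0;
          }
    }

    /* Model of ggc_prune_overhead_list: unmarked pointers become garbage.  */
    static void
    prune_overhead (int (*marked_p) (void *))
    {
      int i;
      for (i = 0; i < n_entries; i++)
        if (table[i].live && !marked_p (table[i].ptr))
          {
            table[i].loc->collected += table[i].size;
            table[i].live = 0;
          }
    }

    static int
    nothing_marked (void *ptr)
    {
      (void) ptr;
      return 0;
    }

    int
    main (void)
    {
      struct loc_stats loc = { 0, 0, 0, 0 };
      int a, b;

      record_overhead (&loc, &a, 32, 8);
      record_overhead (&loc, &b, 32, 8);
      free_overhead (&a);               /* 40 bytes show up as "Freed".  */
      prune_overhead (nothing_marked);  /* b is unmarked: 40 bytes "Garbage".  */

      /* Leak = allocated + overhead - freed - collected = 0 here.  */
      printf ("leak = %lu\n",
              (unsigned long) (loc.allocated + loc.overhead
                               - loc.freed - loc.collected));
      return 0;
    }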
2004-09-02 James E Wilson <wilson@specifixinc.com>
* common.opt (ftrapping-math): Default to on.
$(HASHTAB_H) toplev.h $(PARAMS_H) hosthooks.h
ggc-page.o: ggc-page.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
- flags.h toplev.h $(GGC_H) $(TIMEVAR_H) $(TM_P_H) $(PARAMS_H)
+ flags.h toplev.h $(GGC_H) $(TIMEVAR_H) $(TM_P_H) $(PARAMS_H) $(TREE_FLOW_H)
ggc-zone.o: ggc-zone.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
flags.h toplev.h $(GGC_H) $(TIMEVAR_H) $(TM_P_H) $(PARAMS_H)
#define VALGRIND_DISCARD(x)
#endif
+/* When set, ggc_collect will do collection even if the heap growth
+   threshold has not been reached.  */
+bool ggc_force_collect;
+
/* Statistics about the allocation. */
static ggc_statistics *ggc_stats;
int times;
size_t allocated;
size_t overhead;
+ size_t freed;
+ size_t collected;
};
/* Hashtable used for statistics. */
&& d->function == d2->function);
}
+/* Hashtable mapping the address of an allocated object to its loc
+   descriptor.  */
+static htab_t ptr_hash;
+struct ptr_hash_entry
+{
+ void *ptr;
+ struct loc_descriptor *loc;
+ size_t size;
+};
+
+/* Hash table helper functions. */
+static hashval_t
+hash_ptr (const void *p)
+{
+ const struct ptr_hash_entry *d = p;
+
+ return htab_hash_pointer (d->ptr);
+}
+
+/* Compare an entry against the bare allocation address used as the
+   lookup key; lookups pass the address itself, not a ptr_hash_entry.  */
+static int
+eq_ptr (const void *p1, const void *p2)
+{
+  const struct ptr_hash_entry *p = p1;
+
+  return (p->ptr == p2);
+}
+
/* Return descriptor for given call site, create new one if needed. */
static struct loc_descriptor *
loc_descriptor (const char *name, int line, const char *function)
/* Record ALLOCATED and OVERHEAD bytes to descriptor NAME:LINE (FUNCTION). */
void
-ggc_record_overhead (size_t allocated, size_t overhead,
+ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
const char *name, int line, const char *function)
{
struct loc_descriptor *loc = loc_descriptor (name, line, function);
+ struct ptr_hash_entry *p = xmalloc (sizeof (struct ptr_hash_entry));
+ PTR *slot;
+
+ p->ptr = ptr;
+ p->loc = loc;
+ p->size = allocated + overhead;
+ if (!ptr_hash)
+ ptr_hash = htab_create (10, hash_ptr, eq_ptr, NULL);
+ slot = htab_find_slot_with_hash (ptr_hash, ptr, htab_hash_pointer (ptr), INSERT);
+ if (*slot)
+ abort ();
+ *slot = p;
loc->times++;
loc->allocated+=allocated;
loc->overhead+=overhead;
}
+/* Helper for ggc_prune_overhead_list.  See if the object at *SLOT is
+   still marked; if not, account it as collected and remove it from the
+   hashtable.  */
+static int
+ggc_prune_ptr (void **slot, void *b ATTRIBUTE_UNUSED)
+{
+ struct ptr_hash_entry *p = *slot;
+ if (!ggc_marked_p (p->ptr))
+ {
+ p->loc->collected += p->size;
+ htab_clear_slot (ptr_hash, slot);
+ free (p);
+ }
+ return 1;
+}
+
+/* After live values have been marked, walk all recorded pointers and see
+   if they are still live.  */
+void
+ggc_prune_overhead_list (void)
+{
+ htab_traverse (ptr_hash, ggc_prune_ptr, NULL);
+}
+
+/* Note that pointer PTR has been explicitly freed.  */
+void
+ggc_free_overhead (void *ptr)
+{
+ PTR *slot = htab_find_slot_with_hash (ptr_hash, ptr, htab_hash_pointer (ptr),
+ NO_INSERT);
+ struct ptr_hash_entry *p = *slot;
+ p->loc->freed += p->size;
+ htab_clear_slot (ptr_hash, slot);
+ free (p);
+}
+
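A caveat about ggc_free_overhead above: under NO_INSERT,
htab_find_slot_with_hash returns NULL when the key is absent, so freeing a
pointer that was never recorded would crash on the *slot dereference.  The
patch relies on every GC pointer being recorded at allocation time; a
defensive variant, purely a sketch reusing the file's declarations, would
look like:

    void
    ggc_free_overhead (void *ptr)
    {
      PTR *slot = htab_find_slot_with_hash (ptr_hash, ptr,
                                            htab_hash_pointer (ptr), NO_INSERT);
      struct ptr_hash_entry *p;

      /* NO_INSERT yields a NULL slot when PTR was never recorded.  */
      if (!slot)
        abort ();
      p = (struct ptr_hash_entry *) *slot;
      p->loc->freed += p->size;
      htab_clear_slot (ptr_hash, slot);
      free (p);
    }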
/* Helper for qsort; sort descriptors by amount of memory consumed. */
static int
cmp_statistic (const void *loc1, const void *loc2)
{
struct loc_descriptor *l1 = *(struct loc_descriptor **) loc1;
struct loc_descriptor *l2 = *(struct loc_descriptor **) loc2;
- return (l1->allocated + l1->overhead) - (l2->allocated + l2->overhead);
+ return ((l1->allocated + l1->overhead - l1->freed) -
+ (l2->allocated + l2->overhead - l2->freed));
}
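One more point on cmp_statistic above: it computes the ordering by
subtracting size_t totals and narrowing the result to int, which can wrap
around for very large totals and confuse qsort.  An overflow-safe sketch of
the same ascending-by-net-bytes ordering (illustrative, not part of the
patch):

    static int
    cmp_statistic_safe (const void *loc1, const void *loc2)
    {
      struct loc_descriptor *l1 = *(struct loc_descriptor **) loc1;
      struct loc_descriptor *l2 = *(struct loc_descriptor **) loc2;
      size_t n1 = l1->allocated + l1->overhead - l1->freed;
      size_t n2 = l2->allocated + l2->overhead - l2->freed;

      /* Compare instead of subtracting; no narrowing, no wraparound.  */
      if (n1 < n2)
        return -1;
      return n1 > n2;
    }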
/* Collect array of the descriptors from hashtable. */
#ifdef GATHER_STATISTICS
int nentries = 0;
char s[4096];
- size_t count, size, overhead;
+ size_t collected = 0, freed = 0, allocated = 0, overhead = 0, times = 0;
int i;
+ ggc_force_collect = true;
+ ggc_collect ();
+
loc_array = xcalloc (sizeof (*loc_array), loc_hash->n_elements);
fprintf (stderr, "-------------------------------------------------------\n");
- fprintf (stderr, "\n%-60s %10s %10s %10s\n",
- "source location", "Times", "Allocated", "Overhead");
+ fprintf (stderr, "\n%-48s %10s %10s %10s %10s %10s\n",
+ "source location", "Garbage", "Freed", "Leak", "Overhead", "Times");
fprintf (stderr, "-------------------------------------------------------\n");
- count = 0;
- size = 0;
- overhead = 0;
htab_traverse (loc_hash, add_statistics, &nentries);
qsort (loc_array, nentries, sizeof (*loc_array), cmp_statistic);
for (i = 0; i < nentries; i++)
{
struct loc_descriptor *d = loc_array[i];
- size += d->allocated;
- count += d->times;
+ allocated += d->allocated;
+ times += d->times;
+ freed += d->freed;
+ collected += d->collected;
overhead += d->overhead;
}
for (i = 0; i < nentries; i++)
while ((s2 = strstr (s1, "gcc/")))
s1 = s2 + 4;
sprintf (s, "%s:%i (%s)", s1, d->line, d->function);
- fprintf (stderr, "%-60s %10i %10li %10li:%.3f%%\n", s,
- d->times, (long)d->allocated, (long)d->overhead,
- (d->allocated + d->overhead) *100.0 / (size + overhead));
+ /* Truncate the location string so it fits in its column.  */
+ s[48] = 0;
+ fprintf (stderr, "%-48s %10li:%4.1f%% %10li:%4.1f%% %10li:%4.1f%% %10li:%4.1f%% %10li\n", s,
+ (long)d->collected,
+ (d->collected) * 100.0 / collected,
+ (long)d->freed,
+ (d->freed) * 100.0 / freed,
+ (long)(d->allocated + d->overhead - d->freed - d->collected),
+ (d->allocated + d->overhead - d->freed - d->collected) * 100.0
+ / (allocated + overhead - freed - collected),
+ (long)d->overhead,
+ d->overhead * 100.0 / overhead,
+ (long)d->times);
}
}
- fprintf (stderr, "%-60s %10ld %10ld %10ld\n",
- "Total", (long)count, (long)size, (long)overhead);
+ fprintf (stderr, "%-48s %10ld %10ld %10ld %10ld %10ld\n",
+ "Total", (long)collected, (long)freed,
+ (long)(allocated + overhead - freed - collected), (long)overhead,
+ (long)times);
+ fprintf (stderr, "%-48s %10s %10s %10s %10s %10s\n",
+ "source location", "Garbage", "Freed", "Leak", "Overhead", "Times");
fprintf (stderr, "-------------------------------------------------------\n");
#endif
}
#include "ggc.h"
#include "timevar.h"
#include "params.h"
+#include "tree-flow.h"
#ifdef ENABLE_VALGRIND_CHECKING
# ifdef HAVE_VALGRIND_MEMCHECK_H
# include <valgrind/memcheck.h>
thing you need to do to add a new special allocation size. */
static const size_t extra_order_size_table[] = {
+ sizeof (struct stmt_ann_d),
sizeof (struct tree_decl),
sizeof (struct tree_list),
TREE_EXP_SIZE (2),
G.page_tails[order]->next = entry;
G.page_tails[order] = entry;
}
-#ifdef GATHER_STATISTICS
- ggc_record_overhead (OBJECT_SIZE (order), OBJECT_SIZE (order) - size PASS_MEM_STAT);
-#endif
/* Calculate the object's address. */
result = entry->page + object_offset;
+#ifdef GATHER_STATISTICS
+ ggc_record_overhead (OBJECT_SIZE (order), OBJECT_SIZE (order) - size,
+ result PASS_MEM_STAT);
+#endif
#ifdef ENABLE_GC_CHECKING
/* Keep poisoning-by-writing-0xaf the object, in an attempt to keep the
size_t order = pe->order;
size_t size = OBJECT_SIZE (order);
+#ifdef GATHER_STATISTICS
+ ggc_free_overhead (p);
+#endif
+
if (GGC_DEBUG_LEVEL >= 3)
fprintf (G.debug_file,
"Freeing object, actual size=%lu, at %p on %p\n",
float min_expand = allocated_last_gc * PARAM_VALUE (GGC_MIN_EXPAND) / 100;
- if (G.allocated < allocated_last_gc + min_expand)
+ if (G.allocated < allocated_last_gc + min_expand && !ggc_force_collect)
return;
timevar_push (TV_GC);
clear_marks ();
ggc_mark_roots ();
+#ifdef GATHER_STATISTICS
+ ggc_prune_overhead_list ();
+#endif
poison_pages ();
validate_free_objects ();
sweep_pages ();
: ((x) < 1024*1024*10 \
? (x) / 1024 \
: (x) / (1024*1024))))
-#define LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
+#define STAT_LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
void
ggc_print_statistics (void)
}
fprintf (stderr, "%-5lu %10lu%c %10lu%c %10lu%c\n",
(unsigned long) OBJECT_SIZE (i),
- SCALE (allocated), LABEL (allocated),
- SCALE (in_use), LABEL (in_use),
- SCALE (overhead), LABEL (overhead));
+ SCALE (allocated), STAT_LABEL (allocated),
+ SCALE (in_use), STAT_LABEL (in_use),
+ SCALE (overhead), STAT_LABEL (overhead));
total_overhead += overhead;
}
fprintf (stderr, "%-5s %10lu%c %10lu%c %10lu%c\n", "Total",
- SCALE (G.bytes_mapped), LABEL (G.bytes_mapped),
- SCALE (G.allocated), LABEL(G.allocated),
- SCALE (total_overhead), LABEL (total_overhead));
+ SCALE (G.bytes_mapped), STAT_LABEL (G.bytes_mapped),
+ SCALE (G.allocated), STAT_LABEL (G.allocated),
+ SCALE (total_overhead), STAT_LABEL (total_overhead));
#ifdef GATHER_STATISTICS
{
extern struct alloc_zone *rtl_zone;
/* For regular tree allocations. */
extern struct alloc_zone *tree_zone;
+/* When set, ggc_collect will do collection even if the heap growth
+   threshold has not been reached.  */
+extern bool ggc_force_collect;
/* The internal primitive. */
extern void *ggc_alloc_stat (size_t MEM_STAT_DECL);
/* Free a block. To be used when known for certain it's not reachable. */
extern void ggc_free (void *);
-extern void ggc_record_overhead (size_t, size_t MEM_STAT_DECL);
+extern void ggc_record_overhead (size_t, size_t, void * MEM_STAT_DECL);
+extern void ggc_free_overhead (void *);
+extern void ggc_prune_overhead_list (void);
extern void dump_ggc_loc_statistics (void);
int i;
/* Create a stack of the subexpressions so later we can walk them in
- order from inner to outer. */
- VARRAY_TREE_INIT (stack, 10, "stack");
+ order from inner to outer.
+
+ This array consumes a lot of memory; don't even think of making
+ it VARRAY_TREE. */
+ VARRAY_GENERIC_PTR_NOGC_INIT (stack, 10, "stack");
/* We can either handle REALPART_EXPR, IMAGEPART_EXPR anything that
handled_components can deal with. */
(handled_component_p (*p)
|| TREE_CODE (*p) == REALPART_EXPR || TREE_CODE (*p) == IMAGPART_EXPR);
p = &TREE_OPERAND (*p, 0))
- VARRAY_PUSH_TREE (stack, *p);
+ VARRAY_PUSH_GENERIC_PTR_NOGC (stack, *p);
#if defined ENABLE_CHECKING
if (VARRAY_ACTIVE_SIZE (stack) == 0)
then we gimplify any indices, from left to right. */
for (i = VARRAY_ACTIVE_SIZE (stack) - 1; i >= 0; i--)
{
- tree t = VARRAY_TREE (stack, i);
+ tree t = VARRAY_GENERIC_PTR_NOGC (stack, i);
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
ret = MIN (ret, GS_OK);
}
+ VARRAY_FREE (stack);
+
return ret;
}