#define GCOV_LOCKED 0
#endif
+#ifndef GCOV_SUPPORTS_ATOMIC
+/* Detect whether the target can support atomic updates of profile
+   counters.  */
+#if __SIZEOF_LONG_LONG__ == 4 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
+#define GCOV_SUPPORTS_ATOMIC 1
+#elif __SIZEOF_LONG_LONG__ == 8 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
+#define GCOV_SUPPORTS_ATOMIC 1
+#else
+#define GCOV_SUPPORTS_ATOMIC 0
+#endif
+#endif
+
/* In libgcov we need these functions to be extern, so prefix them with
__gcov. In libgcov they must also be hidden so that the instance in
the executable is not also used in a DSO. */
const struct gcov_fn_info *const *functions; /* pointer to pointers
to function information */
#else
- const struct gcov_fn_info **functions;
+ struct gcov_fn_info **functions;
+ struct gcov_summary summary;
#endif /* !IN_GCOV_TOOL */
};
/* Exactly one of these will be active in the process. */
extern struct gcov_master __gcov_master;
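+/* Static pool of gcov_kvp nodes used when a heap allocation would
+   re-enter the profiler, and the index of its next free slot.  */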
+extern struct gcov_kvp __gcov_kvp_pool[GCOV_PREALLOCATED_KVP];
+extern unsigned __gcov_kvp_pool_index;
/* Dump a set of gcov objects. */
extern void __gcov_dump_one (struct gcov_root *) ATTRIBUTE_HIDDEN;
if USE_ATOMIC is true. */
static inline void
-gcov_counter_add (gcov_type *counter, gcov_type value, int use_atomic)
+gcov_counter_add (gcov_type *counter, gcov_type value,
+ int use_atomic ATTRIBUTE_UNUSED)
{
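+  /* When the target lacks atomic support, USE_ATOMIC is never read,
+     hence ATTRIBUTE_UNUSED on the parameter.  */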
+#if GCOV_SUPPORTS_ATOMIC
if (use_atomic)
__atomic_fetch_add (counter, value, __ATOMIC_RELAXED);
else
+#endif
*counter += value;
}
-/* Set NODE to memory location COUNTER and make it with atomic operation
- if USE_ATOMIC is true. */
+/* Allocate a gcov_kvp node from the heap.  If we are called recursively,
+   allocate it from a pool of pre-allocated nodes instead.  */
-static inline int
-gcov_counter_set_if_null (gcov_type *counter, struct gcov_kvp *node,
- int use_atomic)
+static inline struct gcov_kvp *
+allocate_gcov_kvp (void)
{
- if (use_atomic)
- return !__sync_val_compare_and_swap (counter, NULL, (intptr_t)node);
+ struct gcov_kvp *new_node = NULL;
+
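+  /* xcalloc below may run instrumented code (for instance an
+     instrumented malloc) and re-enter this function, so note that an
+     allocation is in flight; the flag is thread-local where TLS is
+     available.  */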
+ static
+#if defined(HAVE_CC_TLS)
+__thread
+#endif
+ volatile unsigned in_recursion ATTRIBUTE_UNUSED = 0;
+
+#if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
+ if (__builtin_expect (in_recursion, 0))
+ {
+ unsigned index;
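+      /* Reserve the next free slot in the static pool; the atomic
+	 increment keeps concurrent threads from receiving the same
+	 slot.  */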
+#if GCOV_SUPPORTS_ATOMIC
+ index
+ = __atomic_fetch_add (&__gcov_kvp_pool_index, 1, __ATOMIC_RELAXED);
+#else
+ index = __gcov_kvp_pool_index++;
+#endif
+ if (index < GCOV_PREALLOCATED_KVP)
+ new_node = &__gcov_kvp_pool[index];
+ else
+	/* Do not crash in this situation.  */
+ return NULL;
+ }
else
+#endif
{
- *counter = (intptr_t)node;
- return 1;
+ in_recursion = 1;
+ new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+ in_recursion = 0;
}
+
+ return new_node;
}
/* Add key value pair VALUE:COUNT to a top N COUNTERS. When INCREMENT_TOTAL
struct gcov_kvp *prev_node = NULL;
struct gcov_kvp *minimal_node = NULL;
- struct gcov_kvp *current_node = (struct gcov_kvp *)counters[2];
+ struct gcov_kvp *current_node = (struct gcov_kvp *)(intptr_t)counters[2];
while (current_node)
{
}
else
{
- struct gcov_kvp *new_node
- = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+ struct gcov_kvp *new_node = allocate_gcov_kvp ();
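+      /* NULL means the pre-allocated pool was exhausted during a
+	 recursive call; drop this value rather than crash.  */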
+ if (new_node == NULL)
+ return;
+
new_node->value = value;
new_node->count = count;
int success = 0;
if (!counters[2])
- success = gcov_counter_set_if_null (&counters[2], new_node, use_atomic);
+ {
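+      /* counters[2] stores the list head as an integer.  Publish the
+	 new node with a compare-and-swap so that exactly one racing
+	 thread succeeds.  */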
+#if GCOV_SUPPORTS_ATOMIC
+ if (use_atomic)
+ {
+ struct gcov_kvp **ptr = (struct gcov_kvp **)(intptr_t)&counters[2];
+ success = !__sync_val_compare_and_swap (ptr, 0, new_node);
+ }
+ else
+#endif
+ {
+ counters[2] = (intptr_t)new_node;
+ success = 1;
+ }
+ }
else if (prev_node && !prev_node->next)
- success = gcov_counter_set_if_null ((gcov_type *)&prev_node->next,
- new_node, use_atomic);
+ {
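+      /* Likewise append at the tail with a compare-and-swap; if another
+	 thread linked its node first, SUCCESS stays zero.  */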
+#if GCOV_SUPPORTS_ATOMIC
+ if (use_atomic)
+ success = !__sync_val_compare_and_swap (&prev_node->next, 0,
+ new_node);
+ else
+#endif
+ {
+ prev_node->next = new_node;
+ success = 1;
+ }
+ }
/* Increment number of nodes. */
if (success)