X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=libgcc%2Flibgcov.h;h=8be5bebcac08a8f5d761eba75b700f1db8b6ced0;hb=57ea089421a3cfce936f91f3c0c92bf95ac71da1;hp=7c037a978785efddd59edb29415de164100deb5e;hpb=a04b7410d305800b747963ab940d2b1a602b5ddf;p=gcc.git

diff --git a/libgcc/libgcov.h b/libgcc/libgcov.h
index 7c037a97878..8be5bebcac0 100644
--- a/libgcc/libgcov.h
+++ b/libgcc/libgcov.h
@@ -85,6 +85,19 @@ typedef unsigned gcov_type_unsigned __attribute__ ((mode (QI)));
 #define GCOV_LOCKED 0
 #endif
 
+#ifndef GCOV_SUPPORTS_ATOMIC
+/* Detect whether target can support atomic update of profilers.  */
+#if __SIZEOF_LONG_LONG__ == 4 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
+#define GCOV_SUPPORTS_ATOMIC 1
+#else
+#if __SIZEOF_LONG_LONG__ == 8 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
+#define GCOV_SUPPORTS_ATOMIC 1
+#else
+#define GCOV_SUPPORTS_ATOMIC 0
+#endif
+#endif
+#endif
+
 /* In libgcov we need these functions to be extern, so prefix them with
    __gcov.  In libgcov they must also be hidden so that the instance in
    the executable is not also used in a DSO.  */
@@ -203,7 +216,8 @@ struct gcov_info
   const struct gcov_fn_info *const *functions; /* pointer to pointers
                                                   to function information  */
 #else
-  const struct gcov_fn_info **functions;
+  struct gcov_fn_info **functions;
+  struct gcov_summary summary;
 #endif /* !IN_GCOV_TOOL */
 };
 
@@ -236,6 +250,8 @@ struct indirect_call_tuple
 
 /* Exactly one of these will be active in the process.  */
 extern struct gcov_master __gcov_master;
+extern struct gcov_kvp __gcov_kvp_pool[GCOV_PREALLOCATED_KVP];
+extern unsigned __gcov_kvp_pool_index;
 
 /* Dump a set of gcov objects.  */
 extern void __gcov_dump_one (struct gcov_root *) ATTRIBUTE_HIDDEN;
@@ -377,28 +393,56 @@ gcov_get_counter_target (void)
    if USE_ATOMIC is true.  */
 
 static inline void
-gcov_counter_add (gcov_type *counter, gcov_type value, int use_atomic)
+gcov_counter_add (gcov_type *counter, gcov_type value,
+                  int use_atomic ATTRIBUTE_UNUSED)
 {
+#if GCOV_SUPPORTS_ATOMIC
   if (use_atomic)
     __atomic_fetch_add (counter, value, __ATOMIC_RELAXED);
   else
+#endif
     *counter += value;
 }
 
-/* Set NODE to memory location COUNTER and make it with atomic operation
-   if USE_ATOMIC is true.  */
+/* Allocate gcov_kvp from heap.  If we are recursively called, then allocate
+   it from a list of pre-allocated pool.  */
 
-static inline int
-gcov_counter_set_if_null (gcov_type *counter, struct gcov_kvp *node,
-                          int use_atomic)
+static inline struct gcov_kvp *
+allocate_gcov_kvp (void)
 {
-  if (use_atomic)
-    return !__sync_val_compare_and_swap (counter, NULL, (intptr_t)node);
+  struct gcov_kvp *new_node = NULL;
+
+  static
+#if defined(HAVE_CC_TLS)
+__thread
+#endif
+  volatile unsigned in_recursion ATTRIBUTE_UNUSED = 0;
+
+#if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
+  if (__builtin_expect (in_recursion, 0))
+    {
+      unsigned index;
+#if GCOV_SUPPORTS_ATOMIC
+      index
+        = __atomic_fetch_add (&__gcov_kvp_pool_index, 1, __ATOMIC_RELAXED);
+#else
+      index = __gcov_kvp_pool_index++;
+#endif
+      if (index < GCOV_PREALLOCATED_KVP)
+        new_node = &__gcov_kvp_pool[index];
+      else
+        /* Do not crash in the situation.  */
+        return NULL;
+    }
   else
+#endif
     {
-      *counter = (intptr_t)node;
-      return 1;
+      in_recursion = 1;
+      new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+      in_recursion = 0;
     }
+
+  return new_node;
 }
 
 /* Add key value pair VALUE:COUNT to a top N COUNTERS.  When INCREMENT_TOTAL
@@ -414,7 +458,7 @@ gcov_topn_add_value (gcov_type *counters, gcov_type value, gcov_type count,
 
   struct gcov_kvp *prev_node = NULL;
   struct gcov_kvp *minimal_node = NULL;
-  struct gcov_kvp *current_node = (struct gcov_kvp *)counters[2];
+  struct gcov_kvp *current_node = (struct gcov_kvp *)(intptr_t)counters[2];
 
   while (current_node)
     {
@@ -442,17 +486,42 @@ gcov_topn_add_value (gcov_type *counters, gcov_type value, gcov_type count,
     }
   else
     {
-      struct gcov_kvp *new_node
-        = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+      struct gcov_kvp *new_node = allocate_gcov_kvp ();
+      if (new_node == NULL)
+        return;
+
       new_node->value = value;
       new_node->count = count;
 
       int success = 0;
       if (!counters[2])
-        success = gcov_counter_set_if_null (&counters[2], new_node, use_atomic);
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            {
+              struct gcov_kvp **ptr = (struct gcov_kvp **)(intptr_t)&counters[2];
+              success = !__sync_val_compare_and_swap (ptr, 0, new_node);
+            }
+          else
+#endif
+            {
+              counters[2] = (intptr_t)new_node;
+              success = 1;
+            }
+        }
       else if (prev_node && !prev_node->next)
-        success = gcov_counter_set_if_null ((gcov_type *)&prev_node->next,
-                                            new_node, use_atomic);
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            success = !__sync_val_compare_and_swap (&prev_node->next, 0,
+                                                    new_node);
+          else
+#endif
+            {
+              prev_node->next = new_node;
+              success = 1;
+            }
+        }
 
       /* Increment number of nodes.  */
       if (success)
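
Note on the new allocation scheme (commentary, not part of the patch):
allocate_gcov_kvp exists because xcalloc can itself run instrumented code
(for instance when malloc is profiled or interposed), which re-enters the
TOPN profiler.  The function therefore sets an in_recursion flag around the
heap call and, when re-entered, hands out a slot from the statically
pre-allocated __gcov_kvp_pool instead, returning NULL (and dropping the
sample) once the pool is exhausted.  Below is a minimal standalone sketch of
that pattern; the names POOL_SIZE, struct node and alloc_node are
illustrative and do not exist in libgcov:

/* Recursion-guarded allocation with a static fallback pool (sketch).  */
#include <stdio.h>
#include <stdlib.h>

#define POOL_SIZE 64            /* libgcov uses GCOV_PREALLOCATED_KVP */

struct node { long value; struct node *next; };

static struct node pool[POOL_SIZE];  /* pre-allocated fallback storage */
static unsigned pool_index;          /* next free slot in the pool */

static struct node *
alloc_node (void)
{
  /* Thread-local (__thread) in libgcov when HAVE_CC_TLS is defined.  */
  static volatile unsigned in_recursion = 0;

  if (in_recursion)
    {
      /* The allocator re-entered us: take a pool slot instead of calling
         calloc again.  Atomic increment so concurrent threads get
         distinct slots.  */
      unsigned i = __atomic_fetch_add (&pool_index, 1, __ATOMIC_RELAXED);
      return i < POOL_SIZE ? &pool[i] : NULL;  /* exhausted: drop, don't crash */
    }

  in_recursion = 1;
  struct node *n = (struct node *) calloc (1, sizeof (struct node));
  in_recursion = 0;
  return n;
}

int
main (void)
{
  struct node *n = alloc_node ();
  if (n != NULL)
    printf ("allocated node at %p\n", (void *) n);
  free (n);  /* heap nodes only; pool slots are never freed */
  return 0;
}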