X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=libgcc%2Flibgcov.h;h=8be5bebcac08a8f5d761eba75b700f1db8b6ced0;hb=57ea089421a3cfce936f91f3c0c92bf95ac71da1;hp=144b4817a371bb382f834c58e731c5f5845cfb0d;hpb=92d417175b9f7b41d5ebe3ceb723f808917ed964;p=gcc.git

diff --git a/libgcc/libgcov.h b/libgcc/libgcov.h
index 144b4817a37..8be5bebcac0 100644
--- a/libgcc/libgcov.h
+++ b/libgcc/libgcov.h
@@ -1,5 +1,5 @@
 /* Header file for libgcov-*.c.
-   Copyright (C) 1996-2019 Free Software Foundation, Inc.
+   Copyright (C) 1996-2020 Free Software Foundation, Inc.
 
    This file is part of GCC.
 
@@ -85,6 +85,19 @@ typedef unsigned gcov_type_unsigned __attribute__ ((mode (QI)));
 #define GCOV_LOCKED 0
 #endif
 
+#ifndef GCOV_SUPPORTS_ATOMIC
+/* Detect whether target can support atomic update of profilers.  */
+#if __SIZEOF_LONG_LONG__ == 4 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
+#define GCOV_SUPPORTS_ATOMIC 1
+#else
+#if __SIZEOF_LONG_LONG__ == 8 && __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
+#define GCOV_SUPPORTS_ATOMIC 1
+#else
+#define GCOV_SUPPORTS_ATOMIC 0
+#endif
+#endif
+#endif
+
 /* In libgcov we need these functions to be extern, so prefix them
    with __gcov.  In libgcov they must also be hidden so that the
    instance in the executable is not also used in a DSO.  */
@@ -126,7 +139,7 @@ typedef unsigned gcov_position_t;
 
 #define L_gcov 1
 #define L_gcov_merge_add 1
-#define L_gcov_merge_single 1
+#define L_gcov_merge_topn 1
 #define L_gcov_merge_ior 1
 #define L_gcov_merge_time_profile 1
 
@@ -147,7 +160,7 @@ extern struct gcov_info *gcov_list;
 
 /* Poison these, so they don't accidentally slip in.  */
 #pragma GCC poison gcov_write_string gcov_write_tag gcov_write_length
-#pragma GCC poison gcov_time gcov_magic
+#pragma GCC poison gcov_time
 
 #ifdef HAVE_GAS_HIDDEN
 #define ATTRIBUTE_HIDDEN __attribute__ ((__visibility__ ("hidden")))
@@ -203,7 +216,8 @@ struct gcov_info
   const struct gcov_fn_info *const *functions; /* pointer to pointers
                                                   to function information  */
 #else
-  const struct gcov_fn_info **functions;
+  struct gcov_fn_info **functions;
+  struct gcov_summary summary;
 #endif /* !IN_GCOV_TOOL */
 };
 
@@ -236,6 +250,8 @@ struct indirect_call_tuple
 
 /* Exactly one of these will be active in the process.  */
 extern struct gcov_master __gcov_master;
+extern struct gcov_kvp __gcov_kvp_pool[GCOV_PREALLOCATED_KVP];
+extern unsigned __gcov_kvp_pool_index;
 
 /* Dump a set of gcov objects.  */
 extern void __gcov_dump_one (struct gcov_root *) ATTRIBUTE_HIDDEN;
@@ -253,14 +269,20 @@ extern void __gcov_reset_int (void) ATTRIBUTE_HIDDEN;
 /* User function to enable early write of profile information so far.  */
 extern void __gcov_dump_int (void) ATTRIBUTE_HIDDEN;
 
+/* Lock critical section for __gcov_dump and __gcov_reset functions.  */
+extern void __gcov_lock (void) ATTRIBUTE_HIDDEN;
+
+/* Unlock critical section for __gcov_dump and __gcov_reset functions.  */
+extern void __gcov_unlock (void) ATTRIBUTE_HIDDEN;
+
 /* The merge function that just sums the counters.  */
 extern void __gcov_merge_add (gcov_type *, unsigned) ATTRIBUTE_HIDDEN;
 
 /* The merge function to select the minimum valid counter value.  */
 extern void __gcov_merge_time_profile (gcov_type *, unsigned) ATTRIBUTE_HIDDEN;
 
-/* The merge function to choose the most common value.  */
-extern void __gcov_merge_single (gcov_type *, unsigned) ATTRIBUTE_HIDDEN;
+/* The merge function to choose the most common N values.  */
+extern void __gcov_merge_topn (gcov_type *, unsigned) ATTRIBUTE_HIDDEN;
 
 /* The merge function that just ors the counters together.  */
 extern void __gcov_merge_ior (gcov_type *, unsigned) ATTRIBUTE_HIDDEN;
@@ -271,9 +293,10 @@ extern void __gcov_interval_profiler_atomic (gcov_type *, gcov_type, int,
                                              unsigned);
 extern void __gcov_pow2_profiler (gcov_type *, gcov_type);
 extern void __gcov_pow2_profiler_atomic (gcov_type *, gcov_type);
-extern void __gcov_one_value_profiler_v2 (gcov_type *, gcov_type);
-extern void __gcov_one_value_profiler_v2_atomic (gcov_type *, gcov_type);
+extern void __gcov_topn_values_profiler (gcov_type *, gcov_type);
+extern void __gcov_topn_values_profiler_atomic (gcov_type *, gcov_type);
 extern void __gcov_indirect_call_profiler_v4 (gcov_type, void *);
+extern void __gcov_indirect_call_profiler_v4_atomic (gcov_type, void *);
 extern void __gcov_time_profiler (gcov_type *);
 extern void __gcov_time_profiler_atomic (gcov_type *);
 extern void __gcov_average_profiler (gcov_type *, gcov_type);
@@ -328,7 +351,7 @@ gcov_get_counter (void)
    when read value is equal to IGNORE_SCALING.  */
 
 static inline gcov_type
-gcov_get_counter_ignore_scaling (gcov_type ignore_scaling)
+gcov_get_counter_ignore_scaling (gcov_type ignore_scaling ATTRIBUTE_UNUSED)
 {
 #ifndef IN_GCOV_TOOL
   /* This version is for reading count values in libgcov runtime:
@@ -366,6 +389,146 @@ gcov_get_counter_target (void)
 #endif
 }
 
+/* Add VALUE to *COUNTER and make it with atomic operation
+   if USE_ATOMIC is true.  */
+
+static inline void
+gcov_counter_add (gcov_type *counter, gcov_type value,
+                  int use_atomic ATTRIBUTE_UNUSED)
+{
+#if GCOV_SUPPORTS_ATOMIC
+  if (use_atomic)
+    __atomic_fetch_add (counter, value, __ATOMIC_RELAXED);
+  else
+#endif
+    *counter += value;
+}
+
+/* Allocate gcov_kvp from heap.  If we are recursively called, then allocate
+   it from a list of pre-allocated pool.  */
+
+static inline struct gcov_kvp *
+allocate_gcov_kvp (void)
+{
+  struct gcov_kvp *new_node = NULL;
+
+  static
+#if defined(HAVE_CC_TLS)
+__thread
+#endif
+  volatile unsigned in_recursion ATTRIBUTE_UNUSED = 0;
+
+#if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
+  if (__builtin_expect (in_recursion, 0))
+    {
+      unsigned index;
+#if GCOV_SUPPORTS_ATOMIC
+      index
+        = __atomic_fetch_add (&__gcov_kvp_pool_index, 1, __ATOMIC_RELAXED);
+#else
+      index = __gcov_kvp_pool_index++;
+#endif
+      if (index < GCOV_PREALLOCATED_KVP)
+        new_node = &__gcov_kvp_pool[index];
+      else
+        /* Do not crash in the situation.  */
+        return NULL;
+    }
+  else
+#endif
+    {
+      in_recursion = 1;
+      new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+      in_recursion = 0;
+    }
+
+  return new_node;
+}
+
+/* Add key value pair VALUE:COUNT to a top N COUNTERS.  When INCREMENT_TOTAL
+   is true, add COUNT to total of the TOP counter.  If USE_ATOMIC is true,
+   do it in atomic way.  */
+
+static inline void
+gcov_topn_add_value (gcov_type *counters, gcov_type value, gcov_type count,
+                     int use_atomic, int increment_total)
+{
+  if (increment_total)
+    gcov_counter_add (&counters[0], 1, use_atomic);
+
+  struct gcov_kvp *prev_node = NULL;
+  struct gcov_kvp *minimal_node = NULL;
+  struct gcov_kvp *current_node = (struct gcov_kvp *)(intptr_t)counters[2];
+
+  while (current_node)
+    {
+      if (current_node->value == value)
+        {
+          gcov_counter_add (&current_node->count, count, use_atomic);
+          return;
+        }
+
+      if (minimal_node == NULL
+          || current_node->count < minimal_node->count)
+        minimal_node = current_node;
+
+      prev_node = current_node;
+      current_node = current_node->next;
+    }
+
+  if (counters[1] == GCOV_TOPN_MAXIMUM_TRACKED_VALUES)
+    {
+      if (--minimal_node->count < count)
+        {
+          minimal_node->value = value;
+          minimal_node->count = count;
+        }
+    }
+  else
+    {
+      struct gcov_kvp *new_node = allocate_gcov_kvp ();
+      if (new_node == NULL)
+        return;
+
+      new_node->value = value;
+      new_node->count = count;
+
+      int success = 0;
+      if (!counters[2])
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            {
+              struct gcov_kvp **ptr = (struct gcov_kvp **)(intptr_t)&counters[2];
+              success = !__sync_val_compare_and_swap (ptr, 0, new_node);
+            }
+          else
+#endif
+            {
+              counters[2] = (intptr_t)new_node;
+              success = 1;
+            }
+        }
+      else if (prev_node && !prev_node->next)
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            success = !__sync_val_compare_and_swap (&prev_node->next, 0,
+                                                    new_node);
+          else
+#endif
+            {
+              prev_node->next = new_node;
+              success = 1;
+            }
+        }
+
+      /* Increment number of nodes.  */
+      if (success)
+        gcov_counter_add (&counters[1], 1, use_atomic);
+    }
+}
+
 #endif /* !inhibit_libc */
 
 #endif /* GCC_LIBGCOV_H */
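
Illustrative note (not part of the patch above): gcov_topn_add_value keeps each top-N counter as a triple, where counters[0] holds the running total, counters[1] the number of tracked values, and counters[2] a pointer (stored as an integer) to a linked list of struct gcov_kvp nodes; once the list is full, the least frequent node is decayed and eventually replaced. The sketch below is a minimal, single-threaded model of that layout. The names topn_add_value, struct kvp, counter_t and TOPN_MAX are hypothetical stand-ins for gcov_topn_add_value, struct gcov_kvp, gcov_type and GCOV_TOPN_MAXIMUM_TRACKED_VALUES; the atomic paths and the pre-allocated __gcov_kvp_pool fallback are deliberately left out.

/* Minimal sketch of libgcov's top-N value counter layout (assumptions noted
   in the comments; not GCC code).  */

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define TOPN_MAX 4   /* stand-in for GCOV_TOPN_MAXIMUM_TRACKED_VALUES */

typedef int64_t counter_t;   /* stand-in for gcov_type */

/* Stand-in for struct gcov_kvp: one tracked value and its count.  */
struct kvp
{
  counter_t value;
  counter_t count;
  struct kvp *next;
};

/* Simplified, non-atomic model of gcov_topn_add_value.  COUNTERS[0] is the
   total, COUNTERS[1] the number of tracked values, COUNTERS[2] the list head
   stored as an integer, mirroring the layout in the patch above.  */
static void
topn_add_value (counter_t *counters, counter_t value, counter_t count)
{
  counters[0]++;   /* total number of recorded events */

  struct kvp *prev = NULL, *minimal = NULL;
  struct kvp *node = (struct kvp *)(intptr_t)counters[2];

  for (; node; prev = node, node = node->next)
    {
      if (node->value == value)
        {
          node->count += count;   /* value is already tracked */
          return;
        }
      if (minimal == NULL || node->count < minimal->count)
        minimal = node;
    }

  if (counters[1] == TOPN_MAX)
    {
      /* List is full: decay the least frequent node and replace it once its
         count drops below the incoming count.  */
      if (--minimal->count < count)
        {
          minimal->value = value;
          minimal->count = count;
        }
      return;
    }

  /* Append a new node and bump the tracked-value counter.  */
  struct kvp *new_node = (struct kvp *) calloc (1, sizeof (struct kvp));
  if (new_node == NULL)
    return;
  new_node->value = value;
  new_node->count = count;
  if (!counters[2])
    counters[2] = (intptr_t)new_node;
  else
    prev->next = new_node;
  counters[1]++;
}

int
main (void)
{
  counter_t counters[3] = { 0, 0, 0 };
  counter_t stream[] = { 7, 7, 3, 7, 5, 3, 9, 11, 7, 5 };

  for (unsigned i = 0; i < sizeof (stream) / sizeof (stream[0]); i++)
    topn_add_value (counters, stream[i], 1);

  printf ("total = %lld, tracked = %lld\n",
          (long long) counters[0], (long long) counters[1]);
  for (struct kvp *n = (struct kvp *)(intptr_t)counters[2]; n; n = n->next)
    printf ("  value %lld seen %lld times\n",
            (long long) n->value, (long long) n->count);
  return 0;
}

Running the sketch over the small value stream shows the decay behaviour: once TOPN_MAX distinct values are tracked, a new value first drives the weakest node's count toward zero before taking over its slot, which is the same policy the patched gcov_topn_add_value applies.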