+/* Add VALUE to *COUNTER, performing the addition with an atomic
+   operation if USE_ATOMIC is true.  */
+
+static inline void
+gcov_counter_add (gcov_type *counter, gcov_type value,
+                  int use_atomic ATTRIBUTE_UNUSED)
+{
+#if GCOV_SUPPORTS_ATOMIC
+  if (use_atomic)
+    __atomic_fetch_add (counter, value, __ATOMIC_RELAXED);
+  else
+#endif
+    *counter += value;
+}
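+
+/* Illustrative use (not part of this patch): bump a counter by one and
+   request an atomic update by passing a non-zero USE_ATOMIC:
+
+     gcov_counter_add (&counter, 1, 1);
+
+   __ATOMIC_RELAXED makes the addition atomic but imposes no ordering
+   relative to other counter updates.  */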
+
+/* Allocate a gcov_kvp from the heap.  If this function is entered
+   recursively, take the node from a static pool of pre-allocated
+   entries instead.  */
+
+static inline struct gcov_kvp *
+allocate_gcov_kvp (void)
+{
+  struct gcov_kvp *new_node = NULL;
+
+  static
+#if defined(HAVE_CC_TLS)
+__thread
+#endif
+  volatile unsigned in_recursion ATTRIBUTE_UNUSED = 0;
+
+#if !defined(IN_GCOV_TOOL) && !defined(L_gcov_merge_topn)
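+  /* A nested call can happen when xcalloc below ends up executing
+     instrumented code (e.g. an instrumented allocator); serve such a
+     call from the static pre-allocated pool instead of recursing.  */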
+  if (__builtin_expect (in_recursion, 0))
+    {
+      unsigned index;
+#if GCOV_SUPPORTS_ATOMIC
+      index
+        = __atomic_fetch_add (&__gcov_kvp_pool_index, 1, __ATOMIC_RELAXED);
+#else
+      index = __gcov_kvp_pool_index++;
+#endif
+      if (index < GCOV_PREALLOCATED_KVP)
+        new_node = &__gcov_kvp_pool[index];
+      else
+        /* The pool is exhausted; return NULL rather than crash.  */
+        return NULL;
+    }
+  else
+#endif
+    {
+      in_recursion = 1;
+      new_node = (struct gcov_kvp *)xcalloc (1, sizeof (struct gcov_kvp));
+      in_recursion = 0;
+    }
+
+  return new_node;
+}
+
+/* Add the key-value pair VALUE:COUNT to a top-N COUNTERS array.  When
+   INCREMENT_TOTAL is true, increment the total stored in COUNTERS[0].
+   If USE_ATOMIC is true, update the counters atomically.  */
+
+static inline void
+gcov_topn_add_value (gcov_type *counters, gcov_type value, gcov_type count,
+                     int use_atomic, int increment_total)
+{
+  if (increment_total)
+    gcov_counter_add (&counters[0], 1, use_atomic);
+
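+  /* Layout of a TOPN counter: COUNTERS[0] holds the total number of
+     events, COUNTERS[1] the number of tracked values, and COUNTERS[2]
+     a pointer (stored as a gcov_type) to the head of the linked list
+     of key-value nodes.  */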
+  struct gcov_kvp *prev_node = NULL;
+  struct gcov_kvp *minimal_node = NULL;
+  struct gcov_kvp *current_node = (struct gcov_kvp *)(intptr_t)counters[2];
+
+  while (current_node)
+    {
+      if (current_node->value == value)
+        {
+          gcov_counter_add (&current_node->count, count, use_atomic);
+          return;
+        }
+
+      if (minimal_node == NULL
+          || current_node->count < minimal_node->count)
+        minimal_node = current_node;
+
+      prev_node = current_node;
+      current_node = current_node->next;
+    }
+
+  if (counters[1] == GCOV_TOPN_MAXIMUM_TRACKED_VALUES)
+    {
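+      /* The list is at capacity: decay the least-frequent node and
+         replace its value with VALUE if its count drops below COUNT.  */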
+      if (--minimal_node->count < count)
+        {
+          minimal_node->value = value;
+          minimal_node->count = count;
+        }
+    }
+  else
+    {
+      struct gcov_kvp *new_node = allocate_gcov_kvp ();
+      if (new_node == NULL)
+        return;
+
+      new_node->value = value;
+      new_node->count = count;
+
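+      /* Link the new node either as the list head (empty list) or after
+         the current tail.  With USE_ATOMIC, a compare-and-swap keeps the
+         insertion safe against a concurrent writer; if the CAS loses,
+         SUCCESS stays zero and COUNTERS[1] is left unchanged.  */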
+      int success = 0;
+      if (!counters[2])
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            {
+              struct gcov_kvp **ptr
+                = (struct gcov_kvp **)(intptr_t)&counters[2];
+              success = !__sync_val_compare_and_swap (ptr, 0, new_node);
+            }
+          else
+#endif
+            {
+              counters[2] = (intptr_t)new_node;
+              success = 1;
+            }
+        }
+      else if (prev_node && !prev_node->next)
+        {
+#if GCOV_SUPPORTS_ATOMIC
+          if (use_atomic)
+            success = !__sync_val_compare_and_swap (&prev_node->next, 0,
+                                                    new_node);
+          else
+#endif
+            {
+              prev_node->next = new_node;
+              success = 1;
+            }
+        }
+
+      /* Increment number of nodes.  */
+      if (success)
+        gcov_counter_add (&counters[1], 1, use_atomic);
+    }
+}
+
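+/* Illustrative use (hypothetical caller, not part of this patch): record
+   one occurrence of indirect-call target CALLEE in a TOPN counter
+   triple, without atomics:
+
+     gcov_topn_add_value (counters, (gcov_type) (intptr_t) callee,
+                          1, 0, 1);
+
+   COUNT is 1, USE_ATOMIC is 0 and INCREMENT_TOTAL is 1, so COUNTERS[0]
+   is bumped together with the tracked entry for CALLEE.  */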