Attempt to fix the AppVeyor build
[mesa.git] / src / util / u_atomic.c
/*
 * Copyright © 2017 Gražvydas Ignotas
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

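/* Fallback implementations of 64-bit atomic operations for platforms whose
 * toolchain does not provide them natively: every operation is serialized
 * through a single pthread mutex.
 */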
#if defined(MISSING_64BIT_ATOMICS) && defined(HAVE_PTHREAD)

#include <stdint.h>
#include <pthread.h>

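/* Define the symbols weakly where supported, so that a real implementation
 * provided by the compiler runtime or libatomic takes precedence at link
 * time.  Cygwin is excluded here, presumably because weak symbols are not
 * handled reliably by its toolchain.
 */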
#if defined(HAVE_FUNC_ATTRIBUTE_WEAK) && !defined(__CYGWIN__)
#define WEAK __attribute__((weak))
#else
#define WEAK
#endif

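/* A single global lock guards all emulated 64-bit atomic operations. */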
static pthread_mutex_t sync_mutex = PTHREAD_MUTEX_INITIALIZER;

WEAK uint64_t
__sync_add_and_fetch_8(uint64_t *ptr, uint64_t val)
{
   uint64_t r;

   pthread_mutex_lock(&sync_mutex);
   *ptr += val;
   r = *ptr;
   pthread_mutex_unlock(&sync_mutex);

   return r;
}

WEAK uint64_t
__sync_sub_and_fetch_8(uint64_t *ptr, uint64_t val)
{
   uint64_t r;

   pthread_mutex_lock(&sync_mutex);
   *ptr -= val;
   r = *ptr;
   pthread_mutex_unlock(&sync_mutex);

   return r;
}

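/* GCC lowers __atomic builtins it cannot inline to libatomic-style calls
 * such as __atomic_fetch_add_8, so provide those entry points as well,
 * built on top of the locked helpers above.
 */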
WEAK uint64_t
__atomic_fetch_add_8(uint64_t *ptr, uint64_t val, int memorder)
{
   /* __atomic_fetch_add returns the value *before* the addition, while
    * __sync_add_and_fetch returns the value after it, so undo the addition
    * on the result.
    */
   return __sync_add_and_fetch(ptr, val) - val;
}

WEAK uint64_t
__atomic_fetch_sub_8(uint64_t *ptr, uint64_t val, int memorder)
{
   /* Same old-value semantics as above. */
   return __sync_sub_and_fetch(ptr, val) + val;
}

#endif