aboutsummaryrefslogtreecommitdiffhomepage
path: root/osdep/atomic.h
blob: 9028a504ebf91bdea6e72814d1686feae5121965 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
/*
 * This file is part of mpv.
 * Copyright (c) 2013 Stefano Pigozzi <stefano.pigozzi@gmail.com>
 *
 * mpv is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * mpv is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with mpv.  If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef MP_ATOMIC_H
#define MP_ATOMIC_H

#include <inttypes.h>
#include "config.h"

#if HAVE_STDATOMIC
#include <stdatomic.h>
#else

// Emulate the parts of C11 stdatomic.h needed by mpv.
// Still relies on gcc/clang atomic builtins.

// Each atomic type wraps its value in a struct so the variable can only be
// accessed through the atomic_* macros below — mirroring how real C11
// _Atomic objects are opaque to direct arithmetic and assignment.
typedef struct { volatile unsigned long v;      } atomic_ulong;
typedef struct { volatile int v;                } atomic_int;
typedef struct { volatile unsigned int v;       } atomic_uint;
typedef struct { volatile _Bool v;              } atomic_bool;
typedef struct { volatile long long v;          } atomic_llong;
typedef struct { volatile uint_least32_t v;     } atomic_uint_least32_t;
typedef struct { volatile unsigned long long v; } atomic_ullong;

// Static initializer with the same spelling as the C11 macro.
#define ATOMIC_VAR_INIT(x) \
    {.v = (x)}

// Memory-order constants exist only so callers compile; every operation in
// this emulation is sequentially consistent regardless of the order passed.
#define memory_order_relaxed 1
#define memory_order_seq_cst 2

// The _explicit variant discards the memory-order argument (see above).
#define atomic_load_explicit(p, e) atomic_load(p)

#if HAVE_ATOMIC_BUILTINS

// Preferred fallback: the gcc/clang __atomic builtins, which map directly
// onto the C11 memory model. Every operation requests __ATOMIC_SEQ_CST,
// matching the default ordering of the real stdatomic.h functions.
#define atomic_load(p) \
    __atomic_load_n(&(p)->v, __ATOMIC_SEQ_CST)
#define atomic_store(p, val) \
    __atomic_store_n(&(p)->v, val, __ATOMIC_SEQ_CST)
#define atomic_fetch_add(a, b) \
    __atomic_fetch_add(&(a)->v, b, __ATOMIC_SEQ_CST)
#define atomic_fetch_and(a, b) \
    __atomic_fetch_and(&(a)->v, b, __ATOMIC_SEQ_CST)
#define atomic_fetch_or(a, b) \
    __atomic_fetch_or(&(a)->v, b, __ATOMIC_SEQ_CST)
// The 0 (weak=false) argument requests the strong variant, i.e. no
// spurious failure — matching C11 atomic_compare_exchange_strong. On
// failure the builtin writes the observed value back through b.
#define atomic_compare_exchange_strong(a, b, c) \
    __atomic_compare_exchange_n(&(a)->v, b, c, 0, __ATOMIC_SEQ_CST, \
    __ATOMIC_SEQ_CST)

#elif HAVE_SYNC_BUILTINS

// Older gcc __sync builtins. Each one implies a full memory barrier,
// which is at least as strong as the seq_cst ordering being emulated.

// Load: adding 0 returns the current value and acts as a full barrier.
#define atomic_load(p) \
    __sync_fetch_and_add(&(p)->v, 0)
// Store: a plain volatile write bracketed by full barriers.
// NOTE(review): this assumes an aligned plain store of these types is
// itself indivisible on the target — the barriers order it but do not
// make it atomic; confirm for each platform using this path.
#define atomic_store(p, val) \
    (__sync_synchronize(), (p)->v = (val), __sync_synchronize())
#define atomic_fetch_add(a, b) \
    __sync_fetch_and_add(&(a)->v, b)
#define atomic_fetch_and(a, b) \
    __sync_fetch_and_and(&(a)->v, b)
#define atomic_fetch_or(a, b) \
    __sync_fetch_and_or(&(a)->v, b)
// Assumes __sync_val_compare_and_swap is "strong" (using the C11 meaning).
// Returns true on success; on failure the value actually observed is
// written back into *old, as C11 requires. Uses a GNU statement
// expression, so this branch is gcc/clang-only by construction.
#define atomic_compare_exchange_strong(p, old, new) \
    ({ __typeof__((p)->v) val_ = __sync_val_compare_and_swap(&(p)->v, *(old), new); \
       bool ok_ = val_ == *(old);       \
       if (!ok_) *(old) = val_;         \
       ok_; })

#elif defined(__GNUC__)

#include <pthread.h>

// Last-resort fallback: serialize every "atomic" operation through one
// global pthread mutex. mp_atomic_mutex must be defined and initialized
// in exactly one .c file elsewhere in the project. All macros below use
// GNU statement expressions so they can yield a value.
extern pthread_mutex_t mp_atomic_mutex;

#define atomic_load(p)                                  \
    ({ __typeof__(p) p_ = (p);                          \
       pthread_mutex_lock(&mp_atomic_mutex);            \
       __typeof__(p_->v) v = p_->v;                     \
       pthread_mutex_unlock(&mp_atomic_mutex);          \
       v; })
// Note: val is evaluated before the lock is taken, so side effects in
// the argument are not serialized — only the store itself is.
#define atomic_store(p, val)                            \
    ({ __typeof__(val) val_ = (val);                    \
       __typeof__(p) p_ = (p);                          \
       pthread_mutex_lock(&mp_atomic_mutex);            \
       p_->v = val_;                                    \
       pthread_mutex_unlock(&mp_atomic_mutex); })
// Internal helper for the fetch-and-modify family: applies "op" under
// the lock and yields the PREVIOUS value, like the C11 atomic_fetch_*.
#define atomic_fetch_op(a, b, op)                       \
    ({ __typeof__(a) a_ = (a);                          \
       __typeof__(b) b_ = (b);                          \
       pthread_mutex_lock(&mp_atomic_mutex);            \
       __typeof__(a_->v) v = a_->v;                     \
       a_->v = v op b_;                                 \
       pthread_mutex_unlock(&mp_atomic_mutex);          \
       v; })
#define atomic_fetch_add(a, b) atomic_fetch_op(a, b, +)
#define atomic_fetch_and(a, b) atomic_fetch_op(a, b, &)
#define atomic_fetch_or(a, b)  atomic_fetch_op(a, b, |)
// Compare-and-swap under the lock. Yields nonzero (int, not bool) on
// success; on failure the current value is copied into *old, matching
// the C11 contract. Trivially "strong": no spurious failure is possible
// since the whole operation holds the mutex.
#define atomic_compare_exchange_strong(p, old, new)     \
    ({ __typeof__(p) p_ = (p);                          \
       __typeof__(old) old_ = (old);                    \
       __typeof__(new) new_ = (new);                    \
       pthread_mutex_lock(&mp_atomic_mutex);            \
       int res = p_->v == *old_;                        \
       if (res) {                                       \
           p_->v = new_;                                \
       } else {                                         \
           *old_ = p_->v;                               \
       }                                                \
       pthread_mutex_unlock(&mp_atomic_mutex);          \
       res; })

#else
# error "this should have been a configuration error, report a bug please"
#endif /* no atomics */

#endif /* else HAVE_STDATOMIC */

#endif /* MP_ATOMIC_H */