// density
// C++11 library for paged memory management, function queues,
// heterogeneous queues and lifo memory management
// raw_atomic.h

// Copyright Giuseppe Campana (giu.campana@gmail.com) 2016-2018.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#pragma once
#include <atomic>
#include <cstdint>

#if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
#include <intrin.h>
#endif

namespace density
{
42  template <typename TYPE>
43  TYPE raw_atomic_load(
44  TYPE const * i_atomic,
45  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept = delete;
46 
73  template <typename TYPE>
74  void raw_atomic_store(
75  TYPE * i_atomic,
76  TYPE i_value,
77  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept = delete;
78 
105  template <typename TYPE>
107  TYPE * i_atomic,
108  TYPE * i_expected,
109  TYPE i_desired,
110  std::memory_order i_success,
111  std::memory_order i_failure) noexcept = delete;
112 
139  template <typename TYPE>
141  TYPE * i_atomic,
142  TYPE * i_expected,
143  TYPE i_desired,
144  std::memory_order i_success,
145  std::memory_order i_failure) noexcept = delete;
146 
#if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))

149  inline uint32_t raw_atomic_load(
150  uint32_t const * i_atomic,
151  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
152  {
153  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
154 
155  switch (i_memory_order)
156  {
157  case std::memory_order_relaxed:
158  return *i_atomic;
159 
160  default:
161  DENSITY_ASSUME(false); // invalid memory order
162  case std::memory_order_consume:
163  case std::memory_order_acquire:
164  case std::memory_order_seq_cst:
165  {
166  auto const value = *i_atomic;
167  _ReadWriteBarrier();
168  return value;
169  }
170  }
171  }
172 
173 #if defined(_M_X64)
174  inline uint64_t raw_atomic_load(
175  uint64_t const * i_atomic,
176  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
177  {
178  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
179 
180  switch (i_memory_order)
181  {
182  case std::memory_order_relaxed:
183  return *i_atomic;
184 
185  default:
186  DENSITY_ASSUME(false); // invalid memory orders
187  case std::memory_order_consume:
188  case std::memory_order_acquire:
189  case std::memory_order_seq_cst:
190  {
191  auto const value = *i_atomic;
192  _ReadWriteBarrier();
193  return value;
194  }
195  }
196  }
197 #endif
198 
199  inline void raw_atomic_store(
200  uint32_t * i_atomic,
201  uint32_t i_value,
202  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
203  {
204  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
205 
206  switch (i_memory_order)
207  {
208  case std::memory_order_relaxed:
209  *i_atomic = i_value;
210  break;
211 
212  case std::memory_order_release:
213  _ReadWriteBarrier();
214  *i_atomic = i_value;
215  break;
216 
217  case std::memory_order_seq_cst:
218  _InterlockedExchange(reinterpret_cast<long *>(i_atomic), static_cast<long>(i_value));
219  break;
220 
221  default:
222  DENSITY_ASSUME(false); // invalid memory order
223  }
224  }
225 
226 #if defined(_M_X64)
227  inline void raw_atomic_store(
228  uint64_t * i_atomic,
229  uint64_t i_value,
230  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
231  {
232  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
233 
234  switch (i_memory_order)
235  {
236  case std::memory_order_relaxed:
237  *i_atomic = i_value;
238  break;
239 
240  case std::memory_order_release:
241  _ReadWriteBarrier();
242  *i_atomic = i_value;
243  break;
244 
245  case std::memory_order_seq_cst:
246  _InterlockedExchange64(
247  reinterpret_cast<long long *>(i_atomic), static_cast<long long>(i_value));
248  break;
249 
250  default:
251  DENSITY_ASSUME(false); // invalid memory order
252  }
253  }
254 #endif
255 
257  uint32_t * i_atomic,
258  uint32_t * i_expected,
259  uint32_t i_desired,
260  std::memory_order i_success,
261  std::memory_order i_failure) noexcept
262  {
263  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
264  (void)i_success;
265  (void)i_failure;
266  long const prev_val = _InterlockedCompareExchange(
267  reinterpret_cast<long *>(i_atomic), (long)i_desired, *(long *)i_expected);
268  if (prev_val == *(long *)i_expected)
269  {
270  return true;
271  }
272  else
273  {
274  *i_expected = (long)prev_val;
275  return false;
276  }
277  }
278 
279 #if defined(_M_X64)
281  uint64_t * i_atomic,
282  uint64_t * i_expected,
283  uint64_t i_desired,
284  std::memory_order i_success,
285  std::memory_order i_failure) noexcept
286  {
287  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
288  (void)i_success;
289  (void)i_failure;
290  long long const prev_val = _InterlockedCompareExchange64(
291  reinterpret_cast<long long *>(i_atomic), (long long)i_desired, *(long long *)i_expected);
292  if (prev_val == *(long long *)i_expected)
293  {
294  return true;
295  }
296  else
297  {
298  *i_expected = (long long)prev_val;
299  return false;
300  }
301  }
302 #endif
303 
305  uint32_t * i_atomic,
306  uint32_t * i_expected,
307  uint32_t i_desired,
308  std::memory_order i_success,
309  std::memory_order i_failure) noexcept
310  {
312  i_atomic, i_expected, i_desired, i_success, i_failure);
313  }
314 
315 #if defined(_M_X64)
316 
318  uint64_t * i_atomic,
319  uint64_t * i_expected,
320  uint64_t i_desired,
321  std::memory_order i_success,
322  std::memory_order i_failure) noexcept
323  {
325  i_atomic, i_expected, i_desired, i_success, i_failure);
326  }
327 
328 #endif
329 
#elif defined(__GNUG__) // gcc and clang

332  namespace detail
333  {
334  inline int mem_order_cnv(std::memory_order i_order)
335  {
336  switch (i_order)
337  {
338  case std::memory_order_acq_rel:
339  return __ATOMIC_ACQ_REL;
340  case std::memory_order_acquire:
341  return __ATOMIC_ACQUIRE;
342  case std::memory_order_consume:
343  return __ATOMIC_CONSUME;
344  case std::memory_order_relaxed:
345  return __ATOMIC_RELAXED;
346  case std::memory_order_release:
347  return __ATOMIC_RELEASE;
348  case std::memory_order_seq_cst:
349  default:
350  return __ATOMIC_SEQ_CST;
351  }
352  }
353  } // namespace detail
354 
355  inline uintptr_t raw_atomic_load(
356  uintptr_t const * i_atomic,
357  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
358  {
359  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
360 
361  return __atomic_load_n(i_atomic, detail::mem_order_cnv(i_memory_order));
362  }
363 
364  inline void raw_atomic_store(
365  uintptr_t * i_atomic,
366  uintptr_t i_value,
367  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
368  {
369  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
370 
371  __atomic_store_n(i_atomic, i_value, detail::mem_order_cnv(i_memory_order));
372  }
373 
375  uintptr_t * i_atomic,
376  uintptr_t * i_expected,
377  uintptr_t i_desired,
378  std::memory_order i_success,
379  std::memory_order i_failure) noexcept
380  {
381  DENSITY_ASSUME_ALIGNED((void *)i_atomic, alignof(decltype(i_atomic)));
382 
383  return __atomic_compare_exchange_n(
384  i_atomic,
385  i_expected,
386  i_desired,
387  false,
388  detail::mem_order_cnv(i_success),
389  detail::mem_order_cnv(i_failure));
390  }
391 
393  uintptr_t * i_atomic,
394  uintptr_t * i_expected,
395  uintptr_t i_desired,
396  std::memory_order i_success,
397  std::memory_order i_failure) noexcept
398  {
399  return __atomic_compare_exchange_n(
400  i_atomic,
401  i_expected,
402  i_desired,
403  true,
404  detail::mem_order_cnv(i_success),
405  detail::mem_order_cnv(i_failure));
406  }
407 
#endif

436  template <typename TYPE>
438  TYPE * i_atomic,
439  TYPE * i_expected,
440  TYPE i_desired,
441  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
442  {
444  i_atomic, i_expected, i_desired, i_memory_order, i_memory_order);
445  }
446 
473  template <typename TYPE>
475  TYPE * i_atomic,
476  TYPE * i_expected,
477  TYPE i_desired,
478  std::memory_order i_memory_order = std::memory_order_seq_cst) noexcept
479  {
481  i_atomic, i_expected, i_desired, i_memory_order, i_memory_order);
482  }
483 
} // namespace density
// Note: DENSITY_ASSUME and DENSITY_ASSUME_ALIGNED are macros defined in density_config.h.