/*
 * viratomic.h: atomic integer operations
 *
 * Copyright (C) 2012 Red Hat, Inc.
 *
 * Based on code taken from GLib 2.32, under the LGPLv2+
 *
 * Copyright (C) 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library. If not, see
 * <http://www.gnu.org/licenses/>.
 */

#pragma once

#include "internal.h"

#ifdef VIR_ATOMIC_OPS_GCC
# define VIR_STATIC /* Nothing; we just never define the functions */
#else
# define VIR_STATIC static
#endif

/**
 * virAtomicIntGet:
 * Gets the current value of atomic.
 *
 * This call acts as a full compiler and hardware memory barrier
 * (before the get)
 */
VIR_STATIC int virAtomicIntGet(volatile int *atomic)
    ATTRIBUTE_NONNULL(1);

/**
 * virAtomicIntSet:
 * Sets the value of atomic to newval.
 *
 * This call acts as a full compiler and hardware memory barrier
 * (after the set)
 */
VIR_STATIC void virAtomicIntSet(volatile int *atomic,
                                int newval)
    ATTRIBUTE_NONNULL(1);
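
/*
 * Illustrative sketch (not part of this header): publishing a shared flag
 * with virAtomicIntSet() and polling it with virAtomicIntGet() from
 * another thread. The 'quit' variable and the virFoo* helpers are
 * hypothetical names used only for this example.
 *
 *     static volatile int quit;               // shared flag
 *
 *     static void virFooRequestQuit(void)
 *     {
 *         virAtomicIntSet(&quit, 1);          // barrier after the store
 *     }
 *
 *     static bool virFooQuitRequested(void)
 *     {
 *         return virAtomicIntGet(&quit) != 0; // barrier before the load
 *     }
 */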

/**
 * virAtomicIntInc:
 * Increments the value of atomic by 1.
 *
 * Think of this operation as an atomic version of
 * { *atomic += 1; return *atomic; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC int virAtomicIntInc(volatile int *atomic)
    ATTRIBUTE_NONNULL(1);

/**
 * virAtomicIntDecAndTest:
 * Decrements the value of atomic by 1.
 *
 * Think of this operation as an atomic version of
 * { *atomic -= 1; return *atomic == 0; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC bool virAtomicIntDecAndTest(volatile int *atomic)
    ATTRIBUTE_NONNULL(1);
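
/*
 * Illustrative sketch (not part of this header): a simple reference
 * counter built on virAtomicIntInc()/virAtomicIntDecAndTest(). The
 * virFooObject type and its helpers are hypothetical names.
 *
 *     typedef struct {
 *         volatile int refs;   // starts at 1 when the object is created
 *     } virFooObject;
 *
 *     static void virFooObjectRef(virFooObject *obj)
 *     {
 *         virAtomicIntInc(&obj->refs);
 *     }
 *
 *     static void virFooObjectUnref(virFooObject *obj)
 *     {
 *         // true only for the caller that dropped the last reference
 *         if (virAtomicIntDecAndTest(&obj->refs))
 *             free(obj);
 *     }
 */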

/**
 * virAtomicIntCompareExchange:
 * Compares atomic to oldval and, if equal, sets it to newval. If
 * atomic was not equal to oldval then no change occurs.
 *
 * This compare and exchange is done atomically.
 *
 * Think of this operation as an atomic version of
 * { if (*atomic == oldval) { *atomic = newval; return true; }
 *   else return false; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC bool virAtomicIntCompareExchange(volatile int *atomic,
                                            int oldval,
                                            int newval)
    ATTRIBUTE_NONNULL(1);
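
/*
 * Illustrative sketch (not part of this header): a compare-and-swap retry
 * loop that records the largest value ever observed. virFooRecordMax is a
 * hypothetical helper.
 *
 *     static void virFooRecordMax(volatile int *max, int seen)
 *     {
 *         int cur;
 *
 *         do {
 *             cur = virAtomicIntGet(max);
 *             if (seen <= cur)
 *                 return;   // nothing to update
 *             // retry if another thread changed *max in the meantime
 *         } while (!virAtomicIntCompareExchange(max, cur, seen));
 *     }
 */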

/**
 * virAtomicIntAdd:
 * Atomically adds val to the value of atomic.
 *
 * Think of this operation as an atomic version of
 * { tmp = *atomic; *atomic += val; return tmp; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC int virAtomicIntAdd(volatile int *atomic,
                               int val)
    ATTRIBUTE_NONNULL(1);
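
/*
 * Illustrative sketch (not part of this header): virAtomicIntAdd() returns
 * the value held *before* the addition, so it can hand out contiguous
 * ranges. The 'nextId' counter and virFooAllocIds are hypothetical.
 *
 *     static volatile int nextId;   // shared id counter
 *
 *     // reserve 'count' ids and return the first one in the range
 *     static int virFooAllocIds(int count)
 *     {
 *         return virAtomicIntAdd(&nextId, count);
 *     }
 */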

/**
 * virAtomicIntAnd:
 * Performs an atomic bitwise 'and' of the value of atomic
 * and val, storing the result back in atomic.
 *
 * Think of this operation as an atomic version of
 * { tmp = *atomic; *atomic &= val; return tmp; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC unsigned int virAtomicIntAnd(volatile unsigned int *atomic,
                                        unsigned int val)
    ATTRIBUTE_NONNULL(1);

/**
 * virAtomicIntOr:
 * Performs an atomic bitwise 'or' of the value of atomic
 * and val, storing the result back in atomic.
 *
 * Think of this operation as an atomic version of
 * { tmp = *atomic; *atomic |= val; return tmp; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC unsigned int virAtomicIntOr(volatile unsigned int *atomic,
                                       unsigned int val)
    ATTRIBUTE_NONNULL(1);

/**
 * virAtomicIntXor:
 * Performs an atomic bitwise 'xor' of the value of atomic
 * and val, storing the result back in atomic.
 *
 * Think of this operation as an atomic version of
 * { tmp = *atomic; *atomic ^= val; return tmp; }
 *
 * This call acts as a full compiler and hardware memory barrier.
 */
VIR_STATIC unsigned int virAtomicIntXor(volatile unsigned int *atomic,
                                        unsigned int val)
    ATTRIBUTE_NONNULL(1);
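
/*
 * Illustrative sketch (not part of this header): the bitwise helpers all
 * return the value held *before* the operation, which makes "was this
 * flag already set?" checks race-free. The VIR_FOO_FLAG_* constants and
 * the helpers are hypothetical names.
 *
 *     enum {
 *         VIR_FOO_FLAG_READY = (1U << 0),
 *         VIR_FOO_FLAG_DIRTY = (1U << 1),
 *     };
 *
 *     static volatile unsigned int fooFlags;
 *
 *     // returns true if this call was the one that set the flag
 *     static bool virFooMarkReady(void)
 *     {
 *         return !(virAtomicIntOr(&fooFlags, VIR_FOO_FLAG_READY) &
 *                  VIR_FOO_FLAG_READY);
 *     }
 *
 *     static void virFooClearDirty(void)
 *     {
 *         virAtomicIntAnd(&fooFlags, ~VIR_FOO_FLAG_DIRTY);
 *     }
 *
 *     static void virFooToggleDirty(void)
 *     {
 *         virAtomicIntXor(&fooFlags, VIR_FOO_FLAG_DIRTY);
 *     }
 */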

#undef VIR_STATIC

#ifdef VIR_ATOMIC_OPS_GCC
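
/*
 * Notes on the macro bodies below (GLib-derived idioms):
 *  - verify_true(sizeof(*(atomic)) == sizeof(int)) is a compile-time
 *    check that the argument really is int-sized;
 *  - the never-evaluated "(0 ? *(atomic) ^ (val) : 0)" expression only
 *    type-checks: it fails to compile if the arguments are not integer
 *    types (e.g. pointers or floats);
 *  - the __sync_* builtins used here act as full memory barriers.
 */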

# define virAtomicIntGet(atomic) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ *(atomic) : 0); \
        __sync_synchronize(); \
        (int)*(atomic); \
    }))
# define virAtomicIntSet(atomic, newval) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (newval) : 0); \
        *(atomic) = (newval); \
        __sync_synchronize(); \
    }))
# define virAtomicIntInc(atomic) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ *(atomic) : 0); \
        __sync_add_and_fetch((atomic), 1); \
    }))
# define virAtomicIntDecAndTest(atomic) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ *(atomic) : 0); \
        __sync_fetch_and_sub((atomic), 1) == 1; \
    }))
# define virAtomicIntCompareExchange(atomic, oldval, newval) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (newval) ^ (oldval) : 0); \
        (bool)__sync_bool_compare_and_swap((atomic), \
                                           (oldval), (newval)); \
    }))
# define virAtomicIntAdd(atomic, val) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (val) : 0); \
        (int)__sync_fetch_and_add((atomic), (val)); \
    }))
# define virAtomicIntAnd(atomic, val) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (val) : 0); \
        (unsigned int)__sync_fetch_and_and((atomic), (val)); \
    }))
# define virAtomicIntOr(atomic, val) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (val) : 0); \
        (unsigned int)__sync_fetch_and_or((atomic), (val)); \
    }))
# define virAtomicIntXor(atomic, val) \
    (__extension__ ({ \
        (void)verify_true(sizeof(*(atomic)) == sizeof(int)); \
        (void)(0 ? *(atomic) ^ (val) : 0); \
        (unsigned int)__sync_fetch_and_xor((atomic), (val)); \
    }))

#else

# ifdef VIR_ATOMIC_OPS_WIN32

#  include <winsock2.h>
#  include <windows.h>
#  include <intrin.h>
#  if !defined(_M_AMD64) && !defined(_M_IA64) && !defined(_M_X64)
#   define InterlockedAnd _InterlockedAnd
#   define InterlockedOr _InterlockedOr
#   define InterlockedXor _InterlockedXor
#  endif

/*
 * http://msdn.microsoft.com/en-us/library/ms684122(v=vs.85).aspx
 */
static inline int
virAtomicIntGet(volatile int *atomic)
{
    MemoryBarrier();
    return *atomic;
}

static inline void
virAtomicIntSet(volatile int *atomic,
                int newval)
{
    *atomic = newval;
    MemoryBarrier();
}

static inline int
virAtomicIntInc(volatile int *atomic)
{
    return InterlockedIncrement((volatile LONG *)atomic);
}

static inline bool
virAtomicIntDecAndTest(volatile int *atomic)
{
    return InterlockedDecrement((volatile LONG *)atomic) == 0;
}

static inline bool
virAtomicIntCompareExchange(volatile int *atomic,
                            int oldval,
                            int newval)
{
    return InterlockedCompareExchange((volatile LONG *)atomic, newval, oldval) == oldval;
}

static inline int
virAtomicIntAdd(volatile int *atomic,
                int val)
{
    return InterlockedExchangeAdd((volatile LONG *)atomic, val);
}

static inline unsigned int
virAtomicIntAnd(volatile unsigned int *atomic,
                unsigned int val)
{
    return InterlockedAnd((volatile LONG *)atomic, val);
}

static inline unsigned int
virAtomicIntOr(volatile unsigned int *atomic,
               unsigned int val)
{
    return InterlockedOr((volatile LONG *)atomic, val);
}

static inline unsigned int
virAtomicIntXor(volatile unsigned int *atomic,
                unsigned int val)
{
    return InterlockedXor((volatile LONG *)atomic, val);
}

# else
#  ifdef VIR_ATOMIC_OPS_PTHREAD

#   include <pthread.h>

extern pthread_mutex_t virAtomicLock;
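
/*
 * Note: every helper below takes and releases this single global mutex,
 * so all "atomic" operations in this fallback are serialized process-wide
 * and the lock/unlock pair supplies the memory ordering guarantees.
 */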

static inline int
virAtomicIntGet(volatile int *atomic)
{
    int value;

    pthread_mutex_lock(&virAtomicLock);
    value = *atomic;
    pthread_mutex_unlock(&virAtomicLock);

    return value;
}

static inline void
virAtomicIntSet(volatile int *atomic,
                int value)
{
    pthread_mutex_lock(&virAtomicLock);
    *atomic = value;
    pthread_mutex_unlock(&virAtomicLock);
}

static inline int
virAtomicIntInc(volatile int *atomic)
{
    int value;

    pthread_mutex_lock(&virAtomicLock);
    value = ++(*atomic);
    pthread_mutex_unlock(&virAtomicLock);

    return value;
}

static inline bool
virAtomicIntDecAndTest(volatile int *atomic)
{
    bool is_zero;

    pthread_mutex_lock(&virAtomicLock);
    is_zero = --(*atomic) == 0;
    pthread_mutex_unlock(&virAtomicLock);

    return is_zero;
}

static inline bool
virAtomicIntCompareExchange(volatile int *atomic,
                            int oldval,
                            int newval)
{
    bool success;

    pthread_mutex_lock(&virAtomicLock);

    if ((success = (*atomic == oldval)))
        *atomic = newval;

    pthread_mutex_unlock(&virAtomicLock);

    return success;
}

static inline int
virAtomicIntAdd(volatile int *atomic,
                int val)
{
    int oldval;

    pthread_mutex_lock(&virAtomicLock);
    oldval = *atomic;
    *atomic = oldval + val;
    pthread_mutex_unlock(&virAtomicLock);

    return oldval;
}

static inline unsigned int
virAtomicIntAnd(volatile unsigned int *atomic,
                unsigned int val)
{
    unsigned int oldval;

    pthread_mutex_lock(&virAtomicLock);
    oldval = *atomic;
    *atomic = oldval & val;
    pthread_mutex_unlock(&virAtomicLock);

    return oldval;
}

static inline unsigned int
virAtomicIntOr(volatile unsigned int *atomic,
               unsigned int val)
{
    unsigned int oldval;

    pthread_mutex_lock(&virAtomicLock);
    oldval = *atomic;
    *atomic = oldval | val;
    pthread_mutex_unlock(&virAtomicLock);

    return oldval;
}

static inline unsigned int
virAtomicIntXor(volatile unsigned int *atomic,
                unsigned int val)
{
    unsigned int oldval;

    pthread_mutex_lock(&virAtomicLock);
    oldval = *atomic;
    *atomic = oldval ^ val;
    pthread_mutex_unlock(&virAtomicLock);

    return oldval;
}

#  else
#   error "No atomic integer impl for this platform"
#  endif
# endif

/* The int/unsigned int casts here ensure that you can
 * pass either an int or unsigned int to all atomic op
 * functions, in the same way that we can with GCC
 * atomic op helpers.
 */
# define virAtomicIntGet(atomic) \
    virAtomicIntGet((int *)atomic)
# define virAtomicIntSet(atomic, val) \
    virAtomicIntSet((int *)atomic, val)
# define virAtomicIntInc(atomic) \
    virAtomicIntInc((int *)atomic)
# define virAtomicIntDecAndTest(atomic) \
    virAtomicIntDecAndTest((int *)atomic)
# define virAtomicIntCompareExchange(atomic, oldval, newval) \
    virAtomicIntCompareExchange((int *)atomic, oldval, newval)
# define virAtomicIntAdd(atomic, val) \
    virAtomicIntAdd((int *)atomic, val)
# define virAtomicIntAnd(atomic, val) \
    virAtomicIntAnd((unsigned int *)atomic, val)
# define virAtomicIntOr(atomic, val) \
    virAtomicIntOr((unsigned int *)atomic, val)
# define virAtomicIntXor(atomic, val) \
    virAtomicIntXor((unsigned int *)atomic, val)

#endif