C:/rwlock/atomic_util_lnx.h File Reference

Go to the source code of this file.

Classes

struct  atomic_t

Defines

#define CPU_LOCK   "lock; "
#define VOLATILE_PREF   __volatile__
#define atomic_cmpexch   atomic_cmpexch_return

Functions

static __inline__ void atomic_inc (atomic_t *vptr)
static __inline__ void atomic_dec (atomic_t *vptr)
static __inline__ void atomic_add (atomic_t *vptr, const int additive)
static __inline__ void atomic_and (atomic_t *vptr, const int par)
static __inline__ int atomic_inc_return (atomic_t *vptr)
static __inline__ int atomic_dec_return (atomic_t *vptr)
static __inline__ int atomic_add_return (atomic_t *vptr, int additive)
static __inline__ int atomic_cmpexch_return (atomic_t *vptr, const int new_val, int comparand)
void atomic_inc (volatile int *vptr)
void atomic_dec (volatile int *vptr)
void atomic_add (volatile int *vptr, const int additive)
int atomic_inc_return (volatile int *vptr)
int atomic_dec_return (volatile int *vptr)
int atomic_add_return (volatile int *vptr, int additive)
int atomic_cmpexch_return (volatile int *v, const int new_val, int comparand)
void atomic_and (volatile int *vptr, const int par)


Define Documentation

#define atomic_cmpexch   atomic_cmpexch_return

Definition at line 76 of file atomic_util_lnx.h.

#define CPU_LOCK   "lock; "

#define VOLATILE_PREF   __volatile__


Function Documentation

void atomic_add ( volatile int *  vptr,
const int  additive 
) [inline]

Definition at line 88 of file atomic_util_lnx.h.

References atomic_add().

00089 {
00090     atomic_add((atomic_t *)vptr, additive);
00091 }

static __inline__ void atomic_add ( atomic_t *  vptr,
const int  additive 
) [static]

Definition at line 30 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_add(), EvgRWLock2::enter_write(), and SharedCounterRWLock2::leave_write().

00031 {
00032         __asm__ VOLATILE_PREF (CPU_LOCK "addl %1, %0"
00033                               : "=m" (vptr->counter)
00034                               : "r" (additive) );
00035 }

int atomic_add_return ( volatile int *  vptr,
int  additive 
) [inline]

Definition at line 102 of file atomic_util_lnx.h.

References atomic_add_return().

00103 {
00104     return   atomic_add_return((atomic_t *)vptr, additive);
00105 }

static __inline__ int atomic_add_return ( atomic_t *  vptr,
int  additive 
) [static]

Definition at line 59 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_add_return(), SharedCounterRWLock2::enter_write(), and main().

00060 {
00061         __asm__ VOLATILE_PREF ( CPU_LOCK "xaddl %1, %0"
00062                           : "=m" (vptr->counter), "=a" (additive)
00063                           : "m" (vptr->counter), "a" (additive) );
00064         return additive;
00065 }

void atomic_and ( volatile int *  vptr,
const int  par 
) [inline]

Definition at line 112 of file atomic_util_lnx.h.

References atomic_and().

00113 {
00114         atomic_and((atomic_t *)vptr, par);
00115 }

static __inline__ void atomic_and ( atomic_t *  vptr,
const int  par 
) [static]

Definition at line 36 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_and(), EvgRWLock2::leave_write(), and SharedCounterRWLock::leave_write().

00037 {
00038         __asm__ VOLATILE_PREF (CPU_LOCK "andl %1, %0"
00039                               : "=m" (vptr->counter)
00040                               : "r" (par) );
00041 }

int atomic_cmpexch_return ( volatile int *  v,
const int  new_val,
int  comparand 
) [inline]

Definition at line 107 of file atomic_util_lnx.h.

References atomic_cmpexch_return().

00108 {
00109     return atomic_cmpexch_return((atomic_t *)v, new_val, comparand);
00110 }

static __inline__ int atomic_cmpexch_return ( atomic_t *  vptr,
const int  new_val,
int  comparand 
) [static]

Definition at line 67 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_cmpexch_return(), SimpleRWLock::enter_write(), EvgRWLock2::enter_write(), SharedCounterRWLock::enter_write(), and main().

00068 {
00069         __asm__ VOLATILE_PREF (CPU_LOCK "cmpxchgl %2, %0"
00070                               : "=m" (vptr->counter), "=a" (comparand)
00071                               : "r" (new_val), "a" (comparand)
00072                               : "1" );
00073         return comparand;
00074 }

void atomic_dec ( volatile int *  vptr  )  [inline]

Definition at line 83 of file atomic_util_lnx.h.

References atomic_dec().

00084 {
00085     atomic_dec((atomic_t *)vptr);
00086 }

static __inline__ void atomic_dec ( atomic_t *  vptr  )  [static]

int atomic_dec_return ( volatile int *  vptr  )  [inline]

Definition at line 97 of file atomic_util_lnx.h.

References atomic_dec_return().

00098 {
00099     return atomic_dec_return((atomic_t*)vptr);
00100 }

static __inline__ int atomic_dec_return ( atomic_t *  vptr  )  [static]

Definition at line 50 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_dec_return(), EvgRWLock2::enter_read(), SharedCounterRWLock2::enter_read(), SharedCounterRWLock::enter_read(), and main().

00051 {
00052     int ret;
00053         __asm__ VOLATILE_PREF ( "movl $-1, %1; " CPU_LOCK "xaddl %1, %0; decl %1"
00054                           : "=m" (vptr->counter), "=a" (ret)
00055                            );
00056         return ret;
00057 }

void atomic_inc ( volatile int *  vptr  )  [inline]

Definition at line 78 of file atomic_util_lnx.h.

References atomic_inc().

00079 {
00080     atomic_inc((atomic_t *)vptr);
00081 }

static __inline__ void atomic_inc ( atomic_t *  vptr  )  [static]

int atomic_inc_return ( volatile int *  vptr  )  [inline]

Definition at line 92 of file atomic_util_lnx.h.

References atomic_inc_return().

00093 {
00094     return atomic_inc_return((atomic_t*)vptr);
00095 }

static __inline__ int atomic_inc_return ( atomic_t *  vptr  )  [static]

Definition at line 42 of file atomic_util_lnx.h.

References atomic_t::counter, CPU_LOCK, and VOLATILE_PREF.

Referenced by atomic_inc_return(), EvgRWLock2::enter_read(), SharedCounterRWLock2::enter_read(), SharedCounterRWLock::enter_read(), main(), and RWLockLoopTest::run().

00043 {
00044     int ret;
00045         __asm__  VOLATILE_PREF("movl $1, %1; " CPU_LOCK "xaddl %1, %0; incl %1"
00046                           : "=m" (vptr->counter), "=a" (ret)
00047                           );
00048         return ret;
00049 }


Generated on Wed Mar 19 14:59:59 2008 for rwlock by  doxygen 1.5.5