home *** CD-ROM | disk | FTP | other *** search
/ Mac Easy 2010 May / Mac Life Ubuntu.iso / casper / filesystem.squashfs / usr / src / linux-headers-2.6.28-15 / include / asm-frv / atomic.h next >
Encoding:
C/C++ Source or Header  |  2008-12-24  |  4.9 KB  |  203 lines

  1. /* atomic.h: atomic operation emulation for FR-V
  2.  *
  3.  * For an explanation of how atomic ops work in this arch, see:
  4.  *   Documentation/frv/atomic-ops.txt
  5.  *
  6.  * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
  7.  * Written by David Howells (dhowells@redhat.com)
  8.  *
  9.  * This program is free software; you can redistribute it and/or
  10.  * modify it under the terms of the GNU General Public License
  11.  * as published by the Free Software Foundation; either version
  12.  * 2 of the License, or (at your option) any later version.
  13.  */
  14. #ifndef _ASM_ATOMIC_H
  15. #define _ASM_ATOMIC_H
  16.  
  17. #include <linux/types.h>
  18. #include <asm/spr-regs.h>
  19. #include <asm/system.h>
  20.  
  21. #ifdef CONFIG_SMP
  22. #error not SMP safe
  23. #endif
  24.  
  25. /*
  26.  * Atomic operations that C can't guarantee us.  Useful for
  27.  * resource counting etc..
  28.  *
  29.  * We do not have SMP systems, so we don't have to deal with that.
  30.  */
  31.  
/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()    barrier()
#define smp_mb__after_atomic_dec()    barrier()
#define smp_mb__before_atomic_inc()    barrier()
#define smp_mb__after_atomic_inc()    barrier()

/* Wrap the counter in a struct so atomic_t is a distinct type that cannot
 * be mixed up with a plain int by accident. */
typedef struct {
    int counter;
} atomic_t;

#define ATOMIC_INIT(i)        { (i) }
/* Direct access is sufficient here: this header refuses to build for SMP
 * (see the #error above), so no extra ordering is applied. */
#define atomic_read(v)        ((v)->counter)
#define atomic_set(v, i)    (((v)->counter) = (i))
  45.  
  46. #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
/**
 * atomic_add_return - atomically add @i to @v and return the new value
 * @i: amount to add
 * @v: counter to update
 *
 * The LD.P/ORCR pair must not be split: together they establish the
 * condition under which the conditional store (CST.P) is performed, and
 * the loop retries from label 0 whenever the store did not happen.  See
 * Documentation/frv/atomic-ops.txt for the full explanation.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
    unsigned long val;

    asm("0:                        \n"
        "    orcc        gr0,gr0,gr0,icc3    \n"    /* set ICC3.Z */
        "    ckeq        icc3,cc7        \n"
        "    ld.p        %M0,%1            \n"    /* LD.P/ORCR must be atomic */
        "    orcr        cc7,cc7,cc3        \n"    /* set CC3 to true */
        "    add%I2        %1,%2,%1        \n"
        "    cst.p        %1,%M0        ,cc3,#1    \n"    /* store only while CC3 holds */
        "    corcc        gr29,gr29,gr0    ,cc3,#1    \n"    /* clear ICC3.Z if store happens */
        "    beq        icc3,#0,0b        \n"    /* retry if the store was squashed */
        : "+U"(v->counter), "=&r"(val)
        : "NPr"(i)
        : "memory", "cc7", "cc3", "icc3"
        );

    return val;
}
  67.  
/**
 * atomic_sub_return - atomically subtract @i from @v and return the new value
 * @i: amount to subtract
 * @v: counter to update
 *
 * Identical structure to atomic_add_return() except for the SUB in the
 * middle of the load/conditional-store sequence — see that function and
 * Documentation/frv/atomic-ops.txt for how the retry loop works.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
    unsigned long val;

    asm("0:                        \n"
        "    orcc        gr0,gr0,gr0,icc3    \n"    /* set ICC3.Z */
        "    ckeq        icc3,cc7        \n"
        "    ld.p        %M0,%1            \n"    /* LD.P/ORCR must be atomic */
        "    orcr        cc7,cc7,cc3        \n"    /* set CC3 to true */
        "    sub%I2        %1,%2,%1        \n"
        "    cst.p        %1,%M0        ,cc3,#1    \n"    /* store only while CC3 holds */
        "    corcc        gr29,gr29,gr0    ,cc3,#1    \n"    /* clear ICC3.Z if store happens */
        "    beq        icc3,#0,0b        \n"    /* retry if the store was squashed */
        : "+U"(v->counter), "=&r"(val)
        : "NPr"(i)
        : "memory", "cc7", "cc3", "icc3"
        );

    return val;
}
  88.  
  89. #else
  90.  
  91. extern int atomic_add_return(int i, atomic_t *v);
  92. extern int atomic_sub_return(int i, atomic_t *v);
  93.  
  94. #endif
  95.  
  96. static inline int atomic_add_negative(int i, atomic_t *v)
  97. {
  98.     return atomic_add_return(i, v) < 0;
  99. }
  100.  
  101. static inline void atomic_add(int i, atomic_t *v)
  102. {
  103.     atomic_add_return(i, v);
  104. }
  105.  
  106. static inline void atomic_sub(int i, atomic_t *v)
  107. {
  108.     atomic_sub_return(i, v);
  109. }
  110.  
  111. static inline void atomic_inc(atomic_t *v)
  112. {
  113.     atomic_add_return(1, v);
  114. }
  115.  
  116. static inline void atomic_dec(atomic_t *v)
  117. {
  118.     atomic_sub_return(1, v);
  119. }
  120.  
#define atomic_dec_return(v)        atomic_sub_return(1, (v))
#define atomic_inc_return(v)        atomic_add_return(1, (v))

/* The *_and_test() forms return true (nonzero) when the counter's new
 * value is exactly zero. */
#define atomic_sub_and_test(i,v)    (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)        (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)        (atomic_add_return(1, (v)) == 0)
  127.  
  128. /*****************************************************************************/
  129. /*
  130.  * exchange value with memory
  131.  */
  132. #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
  133.  
/*
 * xchg() - atomically exchange *ptr with x, evaluating to the old value.
 *
 * Only 32-bit objects are handled (the single "case 4" below, using the
 * SWAP instruction); any other operand size falls through to a "break"
 * instruction and traps at runtime rather than failing at compile time.
 */
#define xchg(ptr, x)                                \
({                                        \
    __typeof__(ptr) __xg_ptr = (ptr);                    \
    __typeof__(*(ptr)) __xg_orig;                        \
                                        \
    switch (sizeof(__xg_orig)) {                        \
    case 4:                                    \
        asm volatile(                            \
            "swap%I0 %M0,%1"                    \
            : "+m"(*__xg_ptr), "=r"(__xg_orig)            \
            : "1"(x)                        \
            : "memory"                        \
            );                            \
        break;                                \
                                        \
    default:                                \
        __xg_orig = (__typeof__(__xg_orig))0;                \
        asm volatile("break");                        \
        break;                                \
    }                                    \
                                        \
    __xg_orig;                                \
})
  157.  
  158. #else
  159.  
  160. extern uint32_t __xchg_32(uint32_t i, volatile void *v);
  161.  
  162. #define xchg(ptr, x)                                        \
  163. ({                                                \
  164.     __typeof__(ptr) __xg_ptr = (ptr);                            \
  165.     __typeof__(*(ptr)) __xg_orig;                                \
  166.                                                 \
  167.     switch (sizeof(__xg_orig)) {                                \
  168.     case 4: __xg_orig = (__typeof__(*(ptr))) __xchg_32((uint32_t) x, __xg_ptr);    break;    \
  169.     default:                                        \
  170.         __xg_orig = (__typeof__(__xg_orig))0;                                    \
  171.         asm volatile("break");                                \
  172.         break;                                        \
  173.     }                                            \
  174.     __xg_orig;                                        \
  175. })
  176.  
  177. #endif
  178.  
/* Test-and-set: atomically store 1 in *ptr, evaluating to the old value. */
#define tas(ptr) (xchg((ptr), 1))

/* Forward compare-and-swap / exchange to the embedded counter so that
 * atomic_t gets the full cmpxchg()/xchg() interface. */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  183.  
  184. static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  185. {
  186.     int c, old;
  187.     c = atomic_read(v);
  188.     for (;;) {
  189.         if (unlikely(c == (u)))
  190.             break;
  191.         old = atomic_cmpxchg((v), c, c + (a));
  192.         if (likely(old == c))
  193.             break;
  194.         c = old;
  195.     }
  196.     return c != (u);
  197. }
  198.  
  199. #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
  200.  
  201. #include <asm-generic/atomic.h>
  202. #endif /* _ASM_ATOMIC_H */
  203.