Changes in kernel/arch/amd64/include/atomic.h [7a0359b:d99c1d2] in mainline
Files: 1 edited
Legend:
  (unmarked)  unmodified
  +           added
  -           removed
kernel/arch/amd64/include/atomic.h
(left line numbers: r7a0359b, right line numbers: rd99c1d2)

 39   39     #include <arch/barrier.h>
 40   40     #include <preemption.h>
 41        - #include <trace.h>
 42   41
 43        - NO_TRACE static inline void atomic_inc(atomic_t *val)
      42   + static inline void atomic_inc(atomic_t *val)
 44   43     {
 45   44     #ifdef CONFIG_SMP
  …    …
 56   55     }
 57   56
 58        - NO_TRACE static inline void atomic_dec(atomic_t *val)
      57   + static inline void atomic_dec(atomic_t *val)
 59   58     {
 60   59     #ifdef CONFIG_SMP
  …    …
 71   70     }
 72   71
 73        - NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
      72   + static inline atomic_count_t atomic_postinc(atomic_t *val)
 74   73     {
 75   74         atomic_count_t r = 1;
  …    …
 84   83     }
 85   84
 86        - NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
      85   + static inline atomic_count_t atomic_postdec(atomic_t *val)
 87   86     {
 88   87         atomic_count_t r = -1;
  …    …
100   99     #define atomic_predec(val)  (atomic_postdec(val) - 1)
101  100
102        - NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
     101   + static inline atomic_count_t test_and_set(atomic_t *val)
103  102     {
104  103         atomic_count_t v = 1;
  …    …
114  113
115  114     /** amd64 specific fast spinlock */
116        - NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
     115   + static inline void atomic_lock_arch(atomic_t *val)
117  116     {
118  117         atomic_count_t tmp;
  …    …
121  120         asm volatile (
122  121             "0:\n"
123        -         "	pause\n"
124        -         "	mov %[count], %[tmp]\n"
125        -         "	testq %[tmp], %[tmp]\n"
126        -         "	jnz 0b\n"       /* lightweight looping on locked spinlock */
     122   +         "pause\n"
     123   +         "mov %[count], %[tmp]\n"
     124   +         "testq %[tmp], %[tmp]\n"
     125   +         "jnz 0b\n"       /* lightweight looping on locked spinlock */
127  126
128        -         "	incq %[tmp]\n"  /* now use the atomic operation */
129        -         "	xchgq %[count], %[tmp]\n"
130        -         "	testq %[tmp], %[tmp]\n"
131        -         "	jnz 0b\n"
     127   +         "incq %[tmp]\n"   /* now use the atomic operation */
     128   +         "xchgq %[count], %[tmp]\n"
     129   +         "testq %[tmp], %[tmp]\n"
     130   +         "jnz 0b\n"
132  131         : [count] "+m" (val->count),
133  132           [tmp] "=&r" (tmp)
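For context on the spinlock being diffed above: atomic_lock_arch acquires the lock in two phases. It first spins on a plain load (prefixed with pause) while the lock word is non-zero, so contending CPUs only read the cache line; only when the lock appears free does it attempt the atomic xchgq, and it falls back to spinning if another CPU won the race. Below is a minimal C sketch of the same acquire/release pattern built on GCC builtins rather than HelenOS's inline assembly; the demo_* names and the use of __sync_lock_test_and_set / __sync_lock_release are illustrative assumptions, not part of the HelenOS API.

    /* Illustrative sketch only -- not HelenOS code. */
    typedef struct {
        volatile long count;  /* 0 = unlocked, non-zero = held */
    } demo_lock_t;

    static inline void demo_lock(demo_lock_t *lock)
    {
        for (;;) {
            /* Phase 1: lightweight spin on a plain read, analogous to the
             * "pause; mov; testq; jnz 0b" loop in atomic_lock_arch. */
            while (lock->count != 0)
                __builtin_ia32_pause();

            /* Phase 2: the atomic operation -- exchange 1 into the lock
             * word (the "incq; xchgq; testq; jnz 0b" sequence above).
             * If the old value was 0 we own the lock; otherwise retry. */
            if (__sync_lock_test_and_set(&lock->count, 1) == 0)
                return;
        }
    }

    static inline void demo_unlock(demo_lock_t *lock)
    {
        __sync_lock_release(&lock->count);  /* store 0, release semantics */
    }

The two-phase structure matters because a locked exchange always takes the cache line in exclusive mode; spinning on an ordinary read first keeps waiters from saturating the memory bus while the lock is held.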