Changes in kernel/arch/amd64/include/atomic.h [7a0359b:c00589d] in mainline
Files: 1 edited
Legend:
- unprefixed lines are unmodified context
- lines prefixed with '+' were added
- lines prefixed with '-' were removed
kernel/arch/amd64/include/atomic.h
--- r7a0359b
+++ rc00589d

 #define KERN_amd64_ATOMIC_H_

-#include <typedefs.h>
+#include <arch/types.h>
 #include <arch/barrier.h>
 #include <preemption.h>
-#include <trace.h>

-NO_TRACE static inline void atomic_inc(atomic_t *val)
-{
+static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
…
 }

-NO_TRACE static inline void atomic_dec(atomic_t *val)
-{
+static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
…
 }

-NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-	atomic_count_t r = 1;
+	long r = 1;

 	asm volatile (
 		"lock xaddq %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);

…
 }

-NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-	atomic_count_t r = -1;
+	long r = -1;

 	asm volatile (
 		"lock xaddq %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);

…
 #define atomic_predec(val) (atomic_postdec(val) - 1)

-NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
-{
-	atomic_count_t v = 1;
+static inline uint64_t test_and_set(atomic_t *val) {
+	uint64_t v;

 	asm volatile (
+		"movq $1, %[v]\n"
 		"xchgq %[v], %[count]\n"
-		: [v] "+r" (v),
-		  [count] "+m" (val->count)
+		: [v] "=r" (v), [count] "+m" (val->count)
 	);

…
 }

+
 /** amd64 specific fast spinlock */
-NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
+static inline void atomic_lock_arch(atomic_t *val)
 {
-	atomic_count_t tmp;
+	uint64_t tmp;

 	preemption_disable();
 	asm volatile (
 		"0:\n"
-		"	pause\n"
-		"	mov %[count], %[tmp]\n"
-		"	testq %[tmp], %[tmp]\n"
-		"	jnz 0b\n"       /* lightweight looping on locked spinlock */
+		"pause\n"
+		"mov %[count], %[tmp]\n"
+		"testq %[tmp], %[tmp]\n"
+		"jnz 0b\n"          /* lightweight looping on locked spinlock */

-		"	incq %[tmp]\n"  /* now use the atomic operation */
-		"	xchgq %[count], %[tmp]\n"
-		"	testq %[tmp], %[tmp]\n"
-		"	jnz 0b\n"
-		: [count] "+m" (val->count),
-		  [tmp] "=&r" (tmp)
+		"incq %[tmp]\n"     /* now use the atomic operation */
+		"xchgq %[count], %[tmp]\n"
+		"testq %[tmp], %[tmp]\n"
+		"jnz 0b\n"
+		: [count] "+m" (val->count), [tmp] "=&r" (tmp)
 	);
-
 	/*
 	 * Prevent critical section code from bleeding out this way up.
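The revised test_and_set() loads the constant 1 inside the asm block ("movq $1, %[v]") and changes the %[v] constraint from "+r" to "=r", so the operand becomes a pure output that no longer needs to be pre-initialized in C. To make the intent of the xchgq-based primitive easier to follow, here is a minimal, hypothetical sketch, not HelenOS code: the demo_* names and the use of GCC/Clang __atomic builtins in place of the kernel's inline assembly are assumptions for illustration only, and the kernel's preemption control and barrier handling are omitted.

/*
 * Illustrative sketch only (assumed names, __atomic builtins instead of
 * the kernel's inline assembly): an atomic exchange that returns the
 * previous value, plus a spin loop built on it in the spirit of
 * atomic_lock_arch(), without preemption_disable() or kernel barriers.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct {
	volatile uint64_t count;
} demo_atomic_t;

/* Atomically store 1 into *val and return the previous value,
 * mirroring the semantics of the xchgq in test_and_set(). */
static inline uint64_t demo_test_and_set(demo_atomic_t *val)
{
	return __atomic_exchange_n(&val->count, 1, __ATOMIC_ACQUIRE);
}

/* Spin until the previous value was 0, i.e. until the lock was free. */
static inline void demo_lock(demo_atomic_t *val)
{
	while (demo_test_and_set(val) != 0)
		__builtin_ia32_pause();   /* same hint as the "pause" instruction */
}

static inline void demo_unlock(demo_atomic_t *val)
{
	__atomic_store_n(&val->count, 0, __ATOMIC_RELEASE);
}

int main(void)
{
	demo_atomic_t lock = { 0 };

	demo_lock(&lock);
	printf("lock taken, count = %llu\n", (unsigned long long) lock.count);
	demo_unlock(&lock);

	return 0;
}

The exchange-based lock acquires by writing 1 unconditionally and inspecting the old value it got back, which is exactly why test_and_set() can use an output-only register for %[v]: the register's prior contents are irrelevant once the constant is materialized inside the asm.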