Changes in kernel/arch/ia32/include/atomic.h [7a0359b:c00589d] in mainline
Files: 1 edited

Legend: the changes are shown below as a unified diff. Unprefixed lines are unmodified; lines prefixed with '-' appear only in r7a0359b; lines prefixed with '+' appear only in rc00589d.
kernel/arch/ia32/include/atomic.h
--- kernel/arch/ia32/include/atomic.h	(r7a0359b)
+++ kernel/arch/ia32/include/atomic.h	(rc00589d)
@@ -36,11 +36,9 @@
 #define KERN_ia32_ATOMIC_H_
 
-#include <typedefs.h>
+#include <arch/types.h>
 #include <arch/barrier.h>
 #include <preemption.h>
-#include <trace.h>
 
-NO_TRACE static inline void atomic_inc(atomic_t *val)
-{
+static inline void atomic_inc(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
@@ -56,6 +54,5 @@
 }
 
-NO_TRACE static inline void atomic_dec(atomic_t *val)
-{
+static inline void atomic_dec(atomic_t *val) {
 #ifdef CONFIG_SMP
 	asm volatile (
@@ -71,12 +68,10 @@
 }
 
-NO_TRACE static inline atomic_count_t atomic_postinc(atomic_t *val)
+static inline long atomic_postinc(atomic_t *val)
 {
-	atomic_count_t r = 1;
+	long r = 1;
 
 	asm volatile (
 		"lock xaddl %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);
 
@@ -84,12 +80,11 @@
 }
 
-NO_TRACE static inline atomic_count_t atomic_postdec(atomic_t *val)
+static inline long atomic_postdec(atomic_t *val)
 {
-	atomic_count_t r = -1;
+	long r = -1;
 
 	asm volatile (
 		"lock xaddl %[r], %[count]\n"
-		: [count] "+m" (val->count),
-		  [r] "+r" (r)
+		: [count] "+m" (val->count), [r] "+r" (r)
 	);
 
@@ -100,12 +95,11 @@
 #define atomic_predec(val) (atomic_postdec(val) - 1)
 
-NO_TRACE static inline atomic_count_t test_and_set(atomic_t *val)
-{
-	atomic_count_t v = 1;
+static inline uint32_t test_and_set(atomic_t *val) {
+	uint32_t v;
 
 	asm volatile (
+		"movl $1, %[v]\n"
 		"xchgl %[v], %[count]\n"
-		: [v] "+r" (v),
-		  [count] "+m" (val->count)
+		: [v] "=r" (v), [count] "+m" (val->count)
 	);
 
@@ -114,7 +108,7 @@
 
 /** ia32 specific fast spinlock */
-NO_TRACE static inline void atomic_lock_arch(atomic_t *val)
+static inline void atomic_lock_arch(atomic_t *val)
 {
-	atomic_count_t tmp;
+	uint32_t tmp;
 
 	preemption_disable();
@@ -130,8 +124,6 @@
 		"testl %[tmp], %[tmp]\n"
 		"jnz 0b\n"
-		: [count] "+m" (val->count),
-		  [tmp] "=&r" (tmp)
+		: [count] "+m" (val->count), [tmp] "=&r" (tmp)
 	);
-
 	/*
 	 * Prevent critical section code from bleeding out this way up.
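For readers unfamiliar with these idioms, below is a minimal standalone sketch (not HelenOS code) of the two primitives the diff touches: a fetch-and-add built on LOCK XADD, as in atomic_postinc()/atomic_postdec(), and a test-and-set built on XCHG, as in test_and_set(). The type demo_atomic_t and the demo_* function names are invented for illustration; the sketch assumes a GCC-compatible compiler targeting x86 and mirrors the operand-constraint style used in the header.

/*
 * Standalone sketch (not HelenOS code): the fetch-and-add and test-and-set
 * idioms used in the diff, written against a hypothetical demo_atomic_t.
 * Assumes a GCC-compatible compiler on 32- or 64-bit x86.
 */
#include <stdint.h>
#include <stdio.h>

typedef struct {
	volatile int32_t count;
} demo_atomic_t;

/* Atomic fetch-and-add: returns the value *before* the increment, like
 * atomic_postinc(). LOCK XADD loads the old memory value into the register
 * and stores the sum back to memory, all atomically. */
static inline int32_t demo_postinc(demo_atomic_t *val)
{
	int32_t r = 1;

	asm volatile (
		"lock xaddl %[r], %[count]\n"
		: [count] "+m" (val->count), [r] "+r" (r)
	);

	return r;
}

/* Atomic test-and-set: stores 1 and returns the previous value, like
 * test_and_set(). XCHG with a memory operand is implicitly locked, so no
 * LOCK prefix is needed. */
static inline int32_t demo_test_and_set(demo_atomic_t *val)
{
	int32_t v = 1;

	asm volatile (
		"xchgl %[v], %[count]\n"
		: [v] "+r" (v), [count] "+m" (val->count)
	);

	return v;
}

int main(void)
{
	demo_atomic_t counter = { 0 };
	demo_atomic_t lock = { 0 };

	/* The old value (0) is returned; the stored value becomes 1. */
	int32_t old = demo_postinc(&counter);
	printf("postinc returned %d, counter now %d\n",
	    (int) old, (int) counter.count);

	/* Simple spin acquire: loop until the previous value was 0. */
	while (demo_test_and_set(&lock) != 0)
		/* busy-wait */;
	printf("lock acquired, lock.count = %d\n", (int) lock.count);

	return 0;
}

Note that in the kernel header, atomic_lock_arch() follows its spin loop with a barrier so that critical section code does not "bleed out this way up" past the lock acquisition; the sketch above ignores that ordering concern and only demonstrates the instruction-level idioms.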