Changeset ad2e39b in mainline for kernel/arch/amd64/include/atomic.h
- Timestamp:
- 2008-06-16T21:36:39Z (16 years ago)
- Branches:
- lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children:
- 9f491d7
- Parents:
- 5336643
- File:
  - 1 edited
Legend:
- Unmodified
- Added
- Removed
kernel/arch/amd64/include/atomic.h
r5336643 rad2e39b 42 42 static inline void atomic_inc(atomic_t *val) { 43 43 #ifdef CONFIG_SMP 44 asm volatile ("lock incq %0\n" : " =m" (val->count));44 asm volatile ("lock incq %0\n" : "+m" (val->count)); 45 45 #else 46 asm volatile ("incq %0\n" : " =m" (val->count));46 asm volatile ("incq %0\n" : "+m" (val->count)); 47 47 #endif /* CONFIG_SMP */ 48 48 } … … 50 50 static inline void atomic_dec(atomic_t *val) { 51 51 #ifdef CONFIG_SMP 52 asm volatile ("lock decq %0\n" : " =m" (val->count));52 asm volatile ("lock decq %0\n" : "+m" (val->count)); 53 53 #else 54 asm volatile ("decq %0\n" : " =m" (val->count));54 asm volatile ("decq %0\n" : "+m" (val->count)); 55 55 #endif /* CONFIG_SMP */ 56 56 } … … 62 62 asm volatile ( 63 63 "lock xaddq %1, %0\n" 64 : " =m" (val->count), "+r" (r)64 : "+m" (val->count), "+r" (r) 65 65 ); 66 66 … … 74 74 asm volatile ( 75 75 "lock xaddq %1, %0\n" 76 : " =m" (val->count), "+r" (r)76 : "+m" (val->count), "+r" (r) 77 77 ); 78 78 … … 80 80 } 81 81 82 #define atomic_preinc(val) (atomic_postinc(val) +1)83 #define atomic_predec(val) (atomic_postdec(val) -1)82 #define atomic_preinc(val) (atomic_postinc(val) + 1) 83 #define atomic_predec(val) (atomic_postdec(val) - 1) 84 84 85 85 static inline uint64_t test_and_set(atomic_t *val) { … … 89 89 "movq $1, %0\n" 90 90 "xchgq %0, %1\n" 91 : "=r" (v), "=m" (val->count)91 : "=r" (v), "+m" (val->count) 92 92 ); 93 93 … … 103 103 preemption_disable(); 104 104 asm volatile ( 105 "0: ;"105 "0:\n" 106 106 #ifdef CONFIG_HT 107 "pause ;"107 "pause\n" 108 108 #endif 109 "mov %0, %1 ;"110 "testq %1, %1 ;"111 "jnz 0b ;" /* Lightweight looping on locked spinlock */109 "mov %0, %1\n" 110 "testq %1, %1\n" 111 "jnz 0b\n" /* Lightweight looping on locked spinlock */ 112 112 113 "incq %1 ;" /* now use the atomic operation */114 "xchgq %0, %1 ;"115 "testq %1, %1 ;"116 "jnz 0b ;"117 : " =m"(val->count),"=r"(tmp)118 113 "incq %1\n" /* now use the atomic operation */ 114 "xchgq %0, %1\n" 115 "testq %1, %1\n" 116 "jnz 
0b\n" 117 : "+m" (val->count), "=r"(tmp) 118 ); 119 119 /* 120 120 * Prevent critical section code from bleeding out this way up.
Note: See TracChangeset for help on using the changeset viewer.