Changeset 1c6c3e1d in mainline for kernel/arch/arm32/src/atomic.c
- Timestamp: 2023-10-22T17:55:33Z (15 months ago)
- Branches: ticket/834-toolchain-update
- Children: 350ec74
- Parents: 315d487 (diff), 133461c (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- File: 1 edited
kernel/arch/arm32/src/atomic.c
--- kernel/arch/arm32/src/atomic.c    (r315d487)
+++ kernel/arch/arm32/src/atomic.c    (r1c6c3e1d)
…
 #include <arch/asm.h>

-unsigned __atomic_fetch_add_4(volatile void *mem, unsigned val, int model)
+unsigned __atomic_fetch_add_4(volatile void *mem0, unsigned val, int model)
 {
+    volatile unsigned *mem = mem0;
+
     /*
      * This implementation is for UP pre-ARMv6 systems where we do not have
…
      */
     ipl_t ipl = interrupts_disable();
-    unsigned ret = *((volatile unsigned *)mem);
-    *((volatile unsigned *)mem) += val;
+    unsigned ret = *mem;
+    *mem += val;
     interrupts_restore(ipl);
     return ret;
 }

-unsigned __atomic_fetch_sub_4(volatile void *mem, unsigned val, int model)
+unsigned __atomic_fetch_sub_4(volatile void *mem0, unsigned val, int model)
 {
+    volatile unsigned *mem = mem0;
+
     ipl_t ipl = interrupts_disable();
-    unsigned ret = *((volatile unsigned *)mem);
-    *((volatile unsigned *)mem) -= val;
+    unsigned ret = *mem;
+    *mem -= val;
     interrupts_restore(ipl);
     return ret;
…
  * returns the previous value of \a *ptr.
  */
-unsigned __sync_val_compare_and_swap_4(volatile void *ptr, unsigned expected, unsigned new_val)
+unsigned __sync_val_compare_and_swap_4(volatile void *ptr0, unsigned expected,
+    unsigned new_val)
 {
+    volatile unsigned *ptr = ptr0;
+
     /*
      * Using an interrupt disabling spinlock might still lead to deadlock
…
     irq_spinlock_lock(&cas_lock, true);

-    unsigned cur_val = *((volatile unsigned *)ptr);
+    unsigned cur_val = *ptr;

     if (cur_val == expected) {
-        *((volatile unsigned *)ptr) = new_val;
+        *ptr = new_val;
     }

…
 /* Naive implementations of the newer intrinsics. */

-_Bool __atomic_compare_exchange_4(volatile void *mem, void *expected, unsigned desired, _Bool weak, int success, int failure)
+_Bool __atomic_compare_exchange_4(volatile void *mem, void *expected0,
+    unsigned desired, _Bool weak, int success, int failure)
 {
+    unsigned *expected = expected0;
+
     (void) weak;
     (void) success;
     (void) failure;

-    unsigned old = *((unsigned *)expected);
+    unsigned old = *expected;
     unsigned new = __sync_val_compare_and_swap_4(mem, old, desired);
     if (old == new) {
         return 1;
     } else {
-        *((unsigned *)expected) = new;
+        *expected = new;
         return 0;
     }
 }

-unsigned __atomic_exchange_4(volatile void *mem, unsigned val, int model)
+unsigned __atomic_exchange_4(volatile void *mem0, unsigned val, int model)
 {
+    volatile unsigned *mem = mem0;
+
     (void) model;

     irq_spinlock_lock(&cas_lock, true);
-    unsigned old = *((unsigned *)mem);
-    *((unsigned *)mem) = val;
+    unsigned old = *mem;
+    *mem = val;
     irq_spinlock_unlock(&cas_lock, true);
…
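The recurring change is mechanical: each helper now converts its untyped volatile void * parameter once, at function entry, into a properly typed local pointer (e.g. volatile unsigned *mem = mem0;) instead of repeating the cast at every access, which also lets the long signatures wrap cleanly.

For context, a minimal sketch of how these helpers end up being called (the counter and function below are hypothetical and not part of the changeset): on targets without native atomic instructions, GCC lowers C11 atomic operations on 4-byte objects to out-of-line calls such as __atomic_fetch_add_4(), which is why the kernel must provide these definitions itself.

#include <stdatomic.h>

/* Hypothetical shared counter, used only for illustration. */
static atomic_uint wakeup_count;

unsigned note_wakeup(void)
{
    /*
     * On a UP pre-ARMv6 ARM build the compiler cannot inline this as
     * LDREX/STREX, so it emits a call to __atomic_fetch_add_4(), the
     * interrupt-disabling implementation from the file above. Like
     * that helper, atomic_fetch_add() returns the previous value.
     */
    return atomic_fetch_add(&wakeup_count, 1);
}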