/*
 * arch/s390/lib/spinlock.c
 *   Out of line spinlock code.
 *
 *   Copyright (C) IBM Corp. 2004, 2006
 *   Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 */

#include <linux/types.h>
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/init.h>
#include <linux/smp.h>
#include <asm/io.h>

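/*
 * Default number of lock acquisition retries before a waiter yields the
 * CPU or asks the hypervisor to run the lock owner; tunable at boot time
 * via the spin_retry= kernel parameter parsed below.
 */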
Martin Schwidefsky | 951f22d | 2005-07-27 11:44:57 -0700 | [diff] [blame] | 16 | int spin_retry = 1000; |
| 17 | |
| 18 | /** |
| 19 | * spin_retry= parameter |
| 20 | */ |
| 21 | static int __init spin_retry_setup(char *str) |
| 22 | { |
| 23 | spin_retry = simple_strtoul(str, &str, 0); |
| 24 | return 1; |
| 25 | } |
| 26 | __setup("spin_retry=", spin_retry_setup); |
| 27 | |
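/*
 * Out of line slow path of arch_spin_lock(). The lock word holds the
 * bitwise complement of the owning CPU number, so zero means "unlocked".
 * Spin up to spin_retry times while the owner appears to be running,
 * otherwise yield to the owning (virtual) CPU so the hypervisor can
 * schedule it; on LPAR the waiter keeps spinning instead of yielding.
 */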
void arch_spin_lock_wait(arch_spinlock_t *lp)
{
	int count = spin_retry;
	unsigned int cpu = ~smp_processor_id();
	unsigned int owner;

	while (1) {
		owner = lp->owner_cpu;
		if (!owner || smp_vcpu_scheduled(~owner)) {
			for (count = spin_retry; count > 0; count--) {
				if (arch_spin_is_locked(lp))
					continue;
				if (_raw_compare_and_swap(&lp->owner_cpu, 0,
							  cpu) == 0)
					return;
			}
			if (MACHINE_IS_LPAR)
				continue;
		}
		owner = lp->owner_cpu;
		if (owner)
			smp_yield_cpu(~owner);
		if (_raw_compare_and_swap(&lp->owner_cpu, 0, cpu) == 0)
			return;
	}
}
EXPORT_SYMBOL(arch_spin_lock_wait);

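/*
 * Same as arch_spin_lock_wait(), but for the irqsave case: the caller's
 * interrupt state is restored while busy waiting and interrupts are
 * disabled again before each acquisition attempt, so the lock is only
 * ever taken with interrupts off.
 */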
void arch_spin_lock_wait_flags(arch_spinlock_t *lp, unsigned long flags)
{
	int count = spin_retry;
	unsigned int cpu = ~smp_processor_id();
	unsigned int owner;

	local_irq_restore(flags);
	while (1) {
		owner = lp->owner_cpu;
		if (!owner || smp_vcpu_scheduled(~owner)) {
			for (count = spin_retry; count > 0; count--) {
				if (arch_spin_is_locked(lp))
					continue;
				local_irq_disable();
				if (_raw_compare_and_swap(&lp->owner_cpu, 0,
							  cpu) == 0)
					return;
				local_irq_restore(flags);
			}
			if (MACHINE_IS_LPAR)
				continue;
		}
		owner = lp->owner_cpu;
		if (owner)
			smp_yield_cpu(~owner);
		local_irq_disable();
		if (_raw_compare_and_swap(&lp->owner_cpu, 0, cpu) == 0)
			return;
		local_irq_restore(flags);
	}
}
EXPORT_SYMBOL(arch_spin_lock_wait_flags);

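/*
 * Out of line part of arch_spin_trylock(): retry the compare-and-swap up
 * to spin_retry times without yielding. Returns 1 on success, 0 if the
 * lock could not be acquired.
 */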
int arch_spin_trylock_retry(arch_spinlock_t *lp)
{
	unsigned int cpu = ~smp_processor_id();
	int count;

	for (count = spin_retry; count > 0; count--) {
		if (arch_spin_is_locked(lp))
			continue;
		if (_raw_compare_and_swap(&lp->owner_cpu, 0, cpu) == 0)
			return 1;
	}
	return 0;
}
EXPORT_SYMBOL(arch_spin_trylock_retry);

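/*
 * Called while waiting for a contended lock: if the machine runs under a
 * hypervisor (z/VM or KVM), or if the owning virtual CPU is not currently
 * scheduled, donate the time slice to the lock owner.
 */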
void arch_spin_relax(arch_spinlock_t *lock)
{
	unsigned int cpu = lock->owner_cpu;
	if (cpu != 0) {
		if (MACHINE_IS_VM || MACHINE_IS_KVM ||
		    !smp_vcpu_scheduled(~cpu))
			smp_yield_cpu(~cpu);
	}
}
EXPORT_SYMBOL(arch_spin_relax);

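/*
 * Slow path for the reader side of an rwlock. Bit 31 of the lock word is
 * the write lock bit, the lower 31 bits count the readers; a reader gets
 * the lock by incrementing the count with compare-and-swap while the
 * write bit is clear. The CPU is yielded every spin_retry iterations.
 */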
void _raw_read_lock_wait(arch_rwlock_t *rw)
{
	unsigned int old;
	int count = spin_retry;

	while (1) {
		if (count-- <= 0) {
			smp_yield();
			count = spin_retry;
		}
		if (!arch_read_can_lock(rw))
			continue;
		old = rw->lock & 0x7fffffffU;
		if (_raw_compare_and_swap(&rw->lock, old, old + 1) == old)
			return;
	}
}
EXPORT_SYMBOL(_raw_read_lock_wait);

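/*
 * Same as _raw_read_lock_wait(), but for the irqsave case: the caller's
 * interrupt state is restored while waiting and interrupts are disabled
 * again before the compare-and-swap attempt.
 */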
void _raw_read_lock_wait_flags(arch_rwlock_t *rw, unsigned long flags)
{
	unsigned int old;
	int count = spin_retry;

	local_irq_restore(flags);
	while (1) {
		if (count-- <= 0) {
			smp_yield();
			count = spin_retry;
		}
		if (!arch_read_can_lock(rw))
			continue;
		old = rw->lock & 0x7fffffffU;
		local_irq_disable();
		if (_raw_compare_and_swap(&rw->lock, old, old + 1) == old)
			return;
	}
}
EXPORT_SYMBOL(_raw_read_lock_wait_flags);

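/*
 * Out of line part of arch_read_trylock(): retry incrementing the reader
 * count up to spin_retry times. Returns 1 on success, 0 otherwise.
 */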
int _raw_read_trylock_retry(arch_rwlock_t *rw)
{
	unsigned int old;
	int count = spin_retry;

	while (count-- > 0) {
		if (!arch_read_can_lock(rw))
			continue;
		old = rw->lock & 0x7fffffffU;
		if (_raw_compare_and_swap(&rw->lock, old, old + 1) == old)
			return 1;
	}
	return 0;
}
EXPORT_SYMBOL(_raw_read_trylock_retry);

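/*
 * Slow path for the writer side of an rwlock. A writer gets the lock by
 * replacing a completely free lock word (no readers, no writer) with
 * 0x80000000, i.e. by setting the write lock bit. The CPU is yielded
 * every spin_retry iterations.
 */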
void _raw_write_lock_wait(arch_rwlock_t *rw)
{
	int count = spin_retry;

	while (1) {
		if (count-- <= 0) {
			smp_yield();
			count = spin_retry;
		}
		if (!arch_write_can_lock(rw))
			continue;
		if (_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0)
			return;
	}
}
EXPORT_SYMBOL(_raw_write_lock_wait);

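/*
 * Same as _raw_write_lock_wait(), but for the irqsave case: the caller's
 * interrupt state is restored while waiting and interrupts are disabled
 * again before the compare-and-swap attempt.
 */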
void _raw_write_lock_wait_flags(arch_rwlock_t *rw, unsigned long flags)
{
	int count = spin_retry;

	local_irq_restore(flags);
	while (1) {
		if (count-- <= 0) {
			smp_yield();
			count = spin_retry;
		}
		if (!arch_write_can_lock(rw))
			continue;
		local_irq_disable();
		if (_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0)
			return;
	}
}
EXPORT_SYMBOL(_raw_write_lock_wait_flags);

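/*
 * Out of line part of arch_write_trylock(): retry setting the write lock
 * bit up to spin_retry times. Returns 1 on success, 0 otherwise.
 */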
int _raw_write_trylock_retry(arch_rwlock_t *rw)
{
	int count = spin_retry;

	while (count-- > 0) {
		if (!arch_write_can_lock(rw))
			continue;
		if (_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0)
			return 1;
	}
	return 0;
}
EXPORT_SYMBOL(_raw_write_trylock_retry);