/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <asm/asi.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
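	/* Each routine is a compare-and-swap retry loop: load the old
	 * value, compute the new one, then cas it into place.  cas
	 * writes %g7 to [%o1] only if [%o1] still equals %g1, and
	 * always leaves the value it found there in %g7; if that
	 * differs from %g1, another CPU raced us and we retry.  A
	 * rough C sketch of the non-returning form (kernel-style
	 * pseudocode, not the real implementation):
	 *
	 *	void atomic_add(int i, atomic_t *v)
	 *	{
	 *		int old;
	 *		do {
	 *			old = v->counter;
	 *		} while (cmpxchg(&v->counter, old, old + i) != old);
	 *	}
	 */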
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_add, .-atomic_add

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_sub, .-atomic_sub

	/* On SMP we need to use memory barriers to ensure
	 * correct memory operation ordering; nop these out
	 * for uniprocessor.
	 */
#ifdef CONFIG_SMP

#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
#define ATOMIC_POST_BARRIER	\
	ba,pt %xcc, 80b;	\
	membar #StoreLoad | #StoreStore

80:	retl
	 nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif

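	/* The _ret variants below return the post-operation value and
	 * add the barriers: a PRE membar before the load, a POST
	 * membar once the cas has succeeded.  On SMP,
	 * ATOMIC_POST_BARRIER issues its membar in the delay slot of
	 * a branch to the shared "80:" return stub above, which keeps
	 * the membar out of the delay slot of retl (a jmpl), a
	 * placement some UltraSPARC implementations reportedly
	 * mishandle.  The retl/nop following the macro in each
	 * routine is reached only on uniprocessor builds, where the
	 * macro expands to nothing.  atomic_t is 32 bits wide, so
	 * "sra %g7, 0, %o0" sign-extends the result into the 64-bit
	 * return register.
	 */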
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 add	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic_add_ret, .-atomic_add_ret

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic_sub_ret, .-atomic_sub_ret

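	/* The 64-bit variants follow the same pattern, but use
	 * ldx/casx and branch on the 64-bit condition codes (%xcc).
	 * The _ret versions return with a plain mov, since the full
	 * 64-bit result needs no sign extension.
	 */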
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 add	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic64_sub_ret, .-atomic64_sub_ret