This source file includes following definitions.
- xchg_u8
- xchg_u16
- xchg_u32
- xchg_u64
- __xchg
1
2
3
4
5
6
7
8
9
#ifndef __ASM_MIPS_SYSTEM_H
#define __ASM_MIPS_SYSTEM_H

#include <linux/kernel.h>

#if defined (__R4000__)

/*
 * sti() - enable interrupts.
 *
 * Reads CP0 Status ($12), then "ori 0x1f; xori 0x1e" leaves bit 0 (IE)
 * set and clears bits 1-4 (EXL/ERL/KSU on the R4000 --
 * NOTE(review): confirm bit layout against the R4000 manual).
 * ".set noat" because the sequence uses the assembler temporary $1
 * ($at) directly; $1 is declared clobbered.
 */
#define sti() \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	".set\tnoat\n\t" \
	"mfc0\t$1,$12\n\t" \
	"ori\t$1,0x1f\n\t" \
	"xori\t$1,0x1e\n\t" \
	"mtc0\t$1,$12\n\t" \
	".set\tat\n\t" \
	".set\treorder" \
	: \
	: \
	: "$1")

/*
 * cli() - disable interrupts.
 *
 * "ori 1; xori 1" clears bit 0 (IE) of CP0 Status while leaving all
 * other bits intact.  The three nops cover the mtc0 hazard: interrupts
 * are not guaranteed off until the pipeline drains.
 */
#define cli() \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	".set\tnoat\n\t" \
	"mfc0\t$1,$12\n\t" \
	"ori\t$1,1\n\t" \
	"xori\t$1,1\n\t" \
	"mtc0\t$1,$12\n\t" \
	"nop\n\t" \
	"nop\n\t" \
	"nop\n\t" \
	".set\tat\n\t" \
	".set\treorder" \
	: \
	: \
	: "$1")

#else

/*
 * Non-R4000 variant: sti() simply sets bit 0 of CP0 Status
 * (the current-interrupt-enable bit on these CPUs --
 * NOTE(review): presumably IEc on R3000-class parts; confirm).
 */
#define sti() \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	".set\tnoat\n\t" \
	"mfc0\t$1,$12\n\t" \
	"ori\t$1,0x01\n\t" \
	"mtc0\t$1,$12\n\t" \
	".set\tat\n\t" \
	".set\treorder" \
	: \
	: \
	: "$1")

/*
 * cli() - disable interrupts: clear bit 0 of CP0 Status via
 * "ori 1; xori 1", leaving the remaining bits untouched.
 * No hazard nops here, unlike the R4000 version.
 */
#define cli() \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	".set\tnoat\n\t" \
	"mfc0\t$1,$12\n\t" \
	"ori\t$1,1\n\t" \
	"xori\t$1,1\n\t" \
	"mtc0\t$1,$12\n\t" \
	".set\tat\n\t" \
	".set\treorder" \
	: \
	: \
	: "$1")
#endif

#define nop() __asm__ __volatile__ ("nop")

/*
 * save_flags(x) - copy the whole CP0 Status register into x
 * (this "flags" word therefore carries far more than the IE bit).
 */
#define save_flags(x) \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	"mfc0\t%0,$12\n\t" \
	".set\treorder" \
	: "=r" (x)) \

/*
 * restore_flags(x) - write x back to CP0 Status; the nops cover the
 * mtc0 hazard before the new interrupt state is in effect.
 */
#define restore_flags(x) \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	"mtc0\t%0,$12\n\t" \
	"nop\n\t" \
	"nop\n\t" \
	"nop\n\t" \
	".set\treorder" \
	: \
	: "r" (x)) \

/*
 * sync_mem() - issue the "sync" instruction to order outstanding
 * memory operations.  NOTE(review): no "memory" clobber is declared,
 * so the compiler itself is not barred from reordering accesses
 * around this -- confirm callers do not depend on that.
 */
#define sync_mem() \
__asm__ __volatile__( \
	".set\tnoreorder\n\t" \
	"sync\n\t" \
	".set\treorder") \

/*
 * xchg_u8 - atomically exchange the byte at *m with val.
 * Returns the previous contents of *m (zero-extended by the char load).
 *
 * There is no byte-wide ll/sc, so atomicity comes from blocking
 * interrupts around the read-modify-write.  Bug fix: the original
 * called sti() here, which *enables* interrupts and leaves the
 * sequence interruptible; it must be cli().  restore_flags() then
 * puts the caller's interrupt state back.
 */
extern inline unsigned long xchg_u8(volatile char * m, unsigned long val)
{
	unsigned long flags, retval;

	save_flags(flags);
	cli();			/* was sti(): must disable, not enable, interrupts */
	retval = *m;
	*m = val;
	restore_flags(flags);

	return retval;
}
121
/*
 * xchg_u16 - atomically exchange the halfword at *m with val.
 * Returns the previous contents of *m.
 *
 * No halfword ll/sc exists, so the exchange is made atomic by
 * disabling interrupts around it.  Bug fix: the original called
 * sti() (interrupt *enable*) here, defeating the atomicity; the
 * correct call is cli().  restore_flags() restores the caller's
 * previous interrupt state either way.
 */
extern inline unsigned long xchg_u16(volatile short * m, unsigned long val)
{
	unsigned long flags, retval;

	save_flags(flags);
	cli();			/* was sti(): must disable, not enable, interrupts */
	retval = *m;
	*m = val;
	restore_flags(flags);

	return retval;
}
134
135
136
137
138
/*
 * xchg_u32 - atomically exchange the 32-bit word at *m with val.
 * Returns the previous contents of *m.
 *
 * Implemented as a load-linked / store-conditional loop:
 *   ll  loads the current value (returned in %0 / val);
 *   sc  attempts to store the new value, writing 1 to $1 on success
 *       and 0 if the link was broken;
 *   beqzl (branch-likely) retries on failure -- the ll in its delay
 *       slot executes only when the branch is taken, i.e. it reloads
 *       the word only for a retry.
 *
 * $1 ($at) is used as the sc scratch register under ".set noat".
 * The "1"/"2" input constraints tie m and val to the corresponding
 * outputs (val doubles as the result).
 * NOTE(review): no "memory" clobber is declared, so the compiler may
 * cache memory across this asm -- confirm callers don't rely on it
 * acting as a compiler barrier.
 */
extern inline unsigned long xchg_u32(volatile int * m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
		".set\tnoreorder\n\t"
		".set\tnoat\n\t"
		"ll\t%0,(%1)\n"
		"1:\tmove\t$1,%2\n\t"
		"sc\t$1,(%1)\n\t"
		"beqzl\t$1,1b\n\t"
		"ll\t%0,(%1)\n\t"
		".set\tat\n\t"
		".set\treorder"
		: "=r" (val), "=r" (m), "=r" (dummy)
		: "1" (m), "2" (val));

	return val;
}
158
159
160
161
/*
 * xchg_u64 - atomically exchange the 64-bit word at *m with val.
 * Returns the previous contents of *m.
 *
 * Same lld/scd loop as the 32-bit version: beqzl is branch-likely,
 * so the reload in its delay slot runs only when scd failed and the
 * loop retries.
 *
 * Bug fix: the delay-slot reload used the 32-bit "ll", which on a
 * retry would fetch only a sign-extended low word (and mismatch the
 * 64-bit link established for scd); it must be "lld" to pair with
 * "scd".
 */
extern inline unsigned long xchg_u64(volatile long * m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
		".set\tnoreorder\n\t"
		".set\tnoat\n\t"
		"lld\t%0,(%1)\n"
		"1:\tmove\t$1,%2\n\t"
		"scd\t$1,(%1)\n\t"
		"beqzl\t$1,1b\n\t"
		"lld\t%0,(%1)\n\t"	/* was "ll": must be the 64-bit reload */
		".set\tat\n\t"
		".set\treorder"
		: "=r" (val), "=r" (m), "=r" (dummy)
		: "1" (m), "2" (val));

	return val;
}
181
/*
 * xchg(ptr,x) - type-generic atomic exchange for objects of size
 * 1, 2, 4 or 8 bytes; evaluates to the old value of *ptr cast back
 * to the pointed-to type.  Dispatches on sizeof(*(ptr)) via __xchg().
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
/* tas(ptr) - test-and-set: atomically store 1 into *ptr, yield old value. */
#define tas(ptr) (xchg((ptr),1))

/*
 * Not defined anywhere in this file; referencing it when xchg() is
 * used on an unsupported size presumably produces a link-time error
 * (NOTE(review): confirm no definition exists elsewhere in the tree).
 */
extern void __xchg_called_with_bad_pointer(void);
194
/*
 * __xchg - size dispatcher behind the xchg() macro.
 *
 * Hands the exchange off to the width-specific primitive for 1-, 2-,
 * 4- or 8-byte objects.  Any other size falls through to the
 * undefined __xchg_called_with_bad_pointer(), and x is returned
 * unchanged in that (unreachable when sizes are sane) case.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	if (size == 1)
		return xchg_u8(ptr, x);
	if (size == 2)
		return xchg_u16(ptr, x);
	if (size == 4)
		return xchg_u32(ptr, x);
	if (size == 8)
		return xchg_u64(ptr, x);

	__xchg_called_with_bad_pointer();
	return x;
}
210
/* Interrupt and exception dispatch tables; defined in arch code elsewhere. */
extern unsigned long IRQ_vectors[16];
extern unsigned long exception_handlers[32];

/* Install addr as the handler for hardware interrupt n (0..15).
   NOTE(review): n is not range-checked; callers must stay in bounds. */
#define set_int_vector(n,addr) \
	IRQ_vectors[n] = (unsigned long) (addr)

/* Install addr as the handler for CPU exception n (0..31); unchecked. */
#define set_except_vector(n,addr) \
	exception_handlers[n] = (unsigned long) (addr)

#endif /* __ASM_MIPS_SYSTEM_H */