This source file includes the following definitions:
- rdusp
- wrusp
- __xchg
- __xchg
1 #ifndef _M68K_SYSTEM_H
2 #define _M68K_SYSTEM_H
3
4 #include <linux/config.h>
5 #include <linux/linkage.h>
6 #include <asm/segment.h>
7
/*
 * rdusp() - read the user stack pointer (USP) control register.
 *
 * Uses the privileged 68k "movec" instruction, so this must only be
 * executed in supervisor mode.  Returns the current USP value.
 */
extern inline unsigned long rdusp(void) {
	unsigned long usp;

	__asm__ __volatile__("movec %/usp,%0"
			     : "=d" (usp));
	return usp;
}
15
/*
 * wrusp() - write @usp into the user stack pointer (USP) control register.
 *
 * Counterpart of rdusp(); "movec" is privileged, so supervisor mode only.
 */
extern inline void wrusp(unsigned long usp) {
	__asm__ __volatile__("movec %0,%/usp"
			     :
			     : "d" (usp));
}
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
/* Low-level context-switch routine, implemented in assembly. */
asmlinkage void resume(void);
/*
 * switch_to(prev,next) - switch CPU context to task @next.
 *
 * Binds a1 to the byte offset of the `tss` field inside struct
 * task_struct (the classic &((type *)0)->member offsetof idiom) and d1
 * to the @next task pointer, then calls resume().  d0-d5/a0/a1 are
 * declared clobbered across the call.
 *
 * NOTE(review): @prev is not referenced anywhere in the macro body --
 * presumably resume() locates the outgoing task itself; confirm against
 * the resume() implementation.
 */
#define switch_to(prev,next) { \
  register int k __asm__ ("a1") = (int)&((struct task_struct *)0)->tss; \
  register int n __asm__ ("d1") = (int)next; \
  __asm__ __volatile__("jbsr " SYMBOL_NAME_STR(resume) "\n\t" \
		       : : "a" (k), "d" (n) \
		       : "d0", "d1", "d2", "d3", "d4", "d5", "a0", "a1"); \
}
52
/*
 * xchg(ptr,x) - atomically exchange *ptr with x, returning the old value.
 * sizeof(*(ptr)) routes the call to the matching width case in __xchg()
 * below; the result is cast back to the pointed-to type.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
/* tas(ptr) - test-and-set: store 1 in *ptr, return the previous value. */
#define tas(ptr) (xchg((ptr),1))

/*
 * __xg() casts a pointer to an oversized dummy struct so an "m" asm
 * constraint names the pointed-to memory itself instead of forcing the
 * address through a register first.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
58
#if defined(CONFIG_ATARI) && !defined(CONFIG_AMIGA) && !defined(CONFIG_MAC)

/*
 * "Enable interrupts" on an Atari-only kernel: clears just SR bit 10, so
 * the interrupt priority mask stays at 3 -- presumably to keep the
 * Atari's frequent low-level autovector interrupts (e.g. HBL) masked
 * while still allowing higher-priority ones.  TODO confirm rationale.
 */
#define sti() __asm__ __volatile__ ("andiw #0xfbff,%/sr": : : "memory")
#else
/* Enable interrupts: clear the whole IPL field (SR bits 8-10) to 0. */
#define sti() __asm__ __volatile__ ("andiw #0xf8ff,%/sr": : : "memory")
#endif
/* Disable interrupts: raise the IPL mask in SR to 7 (all levels masked). */
#define cli() __asm__ __volatile__ ("oriw #0x0700,%/sr": : : "memory")
#define nop() __asm__ __volatile__ ("nop"::)

/* save_flags(x) - snapshot the whole SR (including the IPL mask) into x. */
#define save_flags(x) \
__asm__ __volatile__("movew %/sr,%0":"=d" (x) : :"memory")

/* restore_flags(x) - write x back into SR, restoring the saved state. */
#define restore_flags(x) \
__asm__ __volatile__("movew %0,%/sr": :"d" (x) : "memory")

/* iret() - return from exception via the privileged "rte" instruction. */
#define iret() __asm__ __volatile__ ("rte": : :"memory", "sp", "cc")
75
#if 1
/*
 * __xchg() - exchange @x with the 1/2/4-byte object at @ptr, returning
 * the old value.  Atomicity is obtained by disabling interrupts around
 * the read-modify-write (save_flags/cli/restore_flags), which works on
 * every 680x0 CPU, unlike the cas-based variant below.
 *
 * NOTE(review): *__xg(ptr) is listed only as an *input* operand even
 * though the second move stores to it; correctness relies on the
 * "memory" clobber forcing the compiler to treat memory as modified --
 * confirm this is intentional.
 * NOTE(review): there is no default case -- if @size is not 1, 2 or 4,
 * tmp is returned uninitialized.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long tmp, flags;

	save_flags(flags);
	cli();

	switch (size) {
	case 1:
		/* byte exchange: tmp = *ptr; *ptr = x */
		__asm__ __volatile__
		("moveb %2,%0\n\t"
		 "moveb %1,%2"
		 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		/* word exchange */
		__asm__ __volatile__
		("movew %2,%0\n\t"
		 "movew %1,%2"
		 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		/* long exchange */
		__asm__ __volatile__
		("movel %2,%0\n\t"
		 "movel %1,%2"
		 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	restore_flags(flags);
	return tmp;
}
#else
/*
 * Lock-free __xchg() variant built on the "cas" (compare-and-swap)
 * instruction: read the old value, then retry cas until the store of @x
 * succeeds, and return the old value.
 *
 * NOTE(review): "cas" exists only on 68020 and later -- presumably why
 * the "#if 1" above selects the interrupt-disable version; confirm.
 * NOTE(review): the written memory operand is listed as an input,
 * relying on the "memory" clobber, as in the variant above.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		/* x = old *ptr; loop cas until *ptr is replaced */
		__asm__ __volatile__
		("moveb %2,%0\n\t"
		 "1:\n\t"
		 "casb %0,%1,%2\n\t"
		 "jne 1b"
		 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
		("movew %2,%0\n\t"
		 "1:\n\t"
		 "casw %0,%1,%2\n\t"
		 "jne 1b"
		 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
		("movel %2,%0\n\t"
		 "1:\n\t"
		 "casl %0,%1,%2\n\t"
		 "jne 1b"
		 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	return x;
}
#endif
139
140 #endif