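/*
 * mul_Xsig.S
 *
 * Multiply a 12-byte (96-bit) fixed-point significand (an Xsig: three
 * 32-bit words, least significant word at offset 0 -- a layout inferred
 * from the code below) by a 32-bit, 64-bit, or 96-bit multiplier,
 * keeping only the most significant 96 bits of the product.  The result
 * overwrites the Xsig operand and is truncated rather than rounded; in
 * the 64-bit and 96-bit variants the lowest partial products are not
 * fully accumulated, so the least significant bit or so may additionally
 * be lost.
 *
 * Presumably called from C as follows (prototypes inferred from the
 * code; treat them as assumptions, not a definitive interface):
 *
 *   void mul32_Xsig(Xsig *x, unsigned b);
 *   void mul64_Xsig(Xsig *x, unsigned long long *b);
 *   void mul_Xsig_Xsig(Xsig *x, Xsig *b);
 */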
	.file	"mul_Xsig.S"


#include "fpu_asm.h"

.text
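
/*
 * mul32_Xsig: multiply the Xsig at PARAM1 by the 32-bit value PARAM2 and
 * keep bits 32..127 of the 128-bit product, i.e. the product shifted
 * right by 32 bits.  A minimal C model of what the code below computes,
 * assuming the Xsig layout described above (the struct and field names
 * here are illustrative, not taken from this file):
 *
 *   typedef struct { unsigned lsw, midw, msw; } Xsig;
 *
 *   void mul32_Xsig_model(Xsig *x, unsigned b)
 *   {
 *           unsigned long long p0 = (unsigned long long)x->lsw  * b;
 *           unsigned long long p1 = (unsigned long long)x->midw * b;
 *           unsigned long long p2 = (unsigned long long)x->msw  * b;
 *           unsigned long long lo  = (p0 >> 32) + (p1 & 0xffffffffULL);
 *           unsigned long long mid = (lo >> 32) + (p1 >> 32)
 *                                    + (p2 & 0xffffffffULL);
 *           unsigned long long hi  = (mid >> 32) + (p2 >> 32);
 *           x->lsw  = (unsigned)lo;
 *           x->midw = (unsigned)mid;
 *           x->msw  = (unsigned)hi;
 *   }
 */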
	.align 2,144
.globl _mul32_Xsig
_mul32_Xsig:
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi	/* pointer to the Xsig (overwritten with the result) */
	movl PARAM2,%ecx	/* 32-bit multiplier */

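/*
 * The 96-bit accumulator lives at -12(%ebp) (least significant word),
 * -8(%ebp) and -4(%ebp) (most significant word).  Only the upper two
 * words need clearing; the first partial product seeds -12(%ebp) directly.
 */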
	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

	movl (%esi),%eax	/* low word of the Xsig */
	mull %ecx
	movl %edx,-12(%ebp)	/* only the high half lands in the result */

	movl 4(%esi),%eax	/* middle word of the Xsig */
	mull %ecx
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of the Xsig */
	mull %ecx
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)

	movl -12(%ebp),%eax	/* write the 96-bit result back over the Xsig */
	movl %eax,(%esi)
	movl -8(%ebp),%eax
	movl %eax,4(%esi)
	movl -4(%ebp),%eax
	movl %eax,8(%esi)

	popl %esi
	leave
	ret


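/*
 * mul64_Xsig: multiply the Xsig at PARAM1 by the 64-bit value at PARAM2
 * (two 32-bit words, least significant first) and keep the most
 * significant 96 bits of the 160-bit product.  The lowest partial
 * product, (low word of x) * (low word of b), is never formed, and the
 * two cross products contribute only their high halves, so the least
 * significant bit or so of the result may be lost.
 */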
	.align 2,144
.globl _mul64_Xsig
_mul64_Xsig:
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi	/* pointer to the Xsig (overwritten with the result) */
	movl PARAM2,%ecx	/* pointer to the 64-bit multiplier */

	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

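/*
 * Accumulate the partial products into the 96-bit accumulator at
 * -12(%ebp)..-4(%ebp), least significant word first.  Partial-product
 * bits below bit 64 of the full 160-bit product are discarded rather
 * than accumulated, so carries out of them are lost.
 */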
	movl (%esi),%eax	/* low word of x */
	mull 4(%ecx)		/* high word of b */
	movl %edx,-12(%ebp)	/* keep only the high half */

	movl 4(%esi),%eax	/* middle word of x */
	mull (%ecx)		/* low word of b */
	addl %edx,-12(%ebp)	/* keep only the high half */
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 4(%esi),%eax	/* middle word of x */
	mull 4(%ecx)		/* high word of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of x */
	mull (%ecx)		/* low word of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of x */
	mull 4(%ecx)		/* high word of b */
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)

	movl -12(%ebp),%eax	/* write the 96-bit result back over the Xsig */
	movl %eax,(%esi)
	movl -8(%ebp),%eax
	movl %eax,4(%esi)
	movl -4(%ebp),%eax
	movl %eax,8(%esi)

	popl %esi
	leave
	ret


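/*
 * mul_Xsig_Xsig: multiply the Xsig at PARAM1 by the Xsig at PARAM2 and
 * keep the most significant 96 bits of the 192-bit product.  The three
 * lowest partial products are skipped entirely and three more contribute
 * only their high 32 bits, so the least significant bit or two of the
 * result may be lost.
 */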
	.align 2,144
.globl _mul_Xsig_Xsig
_mul_Xsig_Xsig:
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi	/* pointer to the destination Xsig */
	movl PARAM2,%ecx	/* pointer to the multiplier Xsig */

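/*
 * As in the routines above, -12(%ebp)..-4(%ebp) is the 96-bit
 * accumulator, least significant word first.  Only bits 96 and up of
 * the 192-bit product are accumulated.
 */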
	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

	movl (%esi),%eax	/* low word of x */
	mull 8(%ecx)		/* high word of b */
	movl %edx,-12(%ebp)	/* keep only the high half */

	movl 4(%esi),%eax	/* middle word of x */
	mull 4(%ecx)		/* middle word of b */
	addl %edx,-12(%ebp)	/* keep only the high half */
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of x */
	mull (%ecx)		/* low word of b */
	addl %edx,-12(%ebp)	/* keep only the high half */
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 4(%esi),%eax	/* middle word of x */
	mull 8(%ecx)		/* high word of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of x */
	mull 4(%ecx)		/* middle word of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* high word of x */
	mull 8(%ecx)		/* high word of b */
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)

	movl -12(%ebp),%edx	/* write the 96-bit result back over the Xsig */
	movl %edx,(%esi)
	movl -8(%ebp),%edx
	movl %edx,4(%esi)
	movl -4(%ebp),%edx
	movl %edx,8(%esi)

	popl %esi
	leave
	ret