source: trunk/source/lisp-kernel/x86-asmutils32.s @ 11246

Last change on this file since 11246 was 11246, checked in by gb, 12 years ago

Simplify the code in (and the comments preceding) switch_to_foreign_stack;
we're only called from one place and always get 3 arguments that'll go
on the new stack, and we may need to push a fourth word (or be very
careful not to do so) in some cases.

File size: 7.9 KB
Line 
1/*   Copyright (C) 2005 Clozure Associates */
2/*   This file is part of OpenMCL.   */
3 
4/*   OpenMCL is licensed under the terms of the Lisp Lesser GNU Public */
5/*   License , known as the LLGPL and distributed with OpenMCL as the */
6/*   file "LICENSE".  The LLGPL consists of a preamble and the LGPL, */
7/*   which is distributed with OpenMCL as the file "LGPL".  Where these */
8/*   conflict, the preamble takes precedence.   */
9 
10/*   OpenMCL is referenced in the preamble as the "LIBRARY." */
11 
12/*   The LLGPL is also available online at */
13/*   http://opensource.franz.com/preamble.html */
14
15
16       
17
18        include(lisp.s)
19
20        _beginfile
21
/* Return the stack pointer as seen on entry (i.e. the address of */
/* our own return address).  C-callable; result in %eax. */
_exportfn(C(current_stack_pointer))
        __(movl %esp,%eax)
        __(ret)
_endfn
26                       
/* Return the number of leading zero bits in the 32-bit argument. */
/* bsr yields the index of the highest set bit; xor with 31 turns */
/* that index into a leading-zero count. */
/* NOTE(review): bsr leaves %eax undefined when the argument is 0; */
/* presumably callers never pass 0 -- confirm. */
_exportfn(C(count_leading_zeros))
        __(bsr 4(%esp),%eax)
        __(xor $31,%eax)
        __(ret)
_endfn
32
/* A C-callable function that does nothing. */
_exportfn(C(noop))
        __(ret)
_endfn
36
/* Load the SSE control/status register (MXCSR) from the argument. */
_exportfn(C(set_mxcsr))
        __(ldmxcsr 4(%esp))
        __(ret)
_endfn
41       
/* Return the current value of the SSE control/status register */
/* (MXCSR) in %eax. */
_exportfn(C(get_mxcsr))
        __(push $0)             /* make a 32-bit scratch slot on the stack */
        __(stmxcsr (%esp))      /* store MXCSR into the slot */
        __(pop %eax)            /* return it */
        __(ret)
_endfn
48
/* Empty stub: no FP context is saved here on x86-32. */
/* NOTE(review): there is no ret, so an actual call would fall */
/* through into the code that follows; presumably this is never */
/* called on this platform -- TODO confirm. */
_exportfn(C(save_fp_context))
_endfn
51       
/* Empty stub: no FP context is restored here on x86-32. */
/* NOTE(review): no ret -- a call would fall through into the code */
/* that follows; presumably never called here -- TODO confirm. */
_exportfn(C(restore_fp_context))
_endfn
54
/*  Atomically store new in *p, if *p == old. */
/*  Return actual old value. */
/* natural store_conditional(natural *p, natural old, natural new) */
/* NOTE(review): on failure, cmpxchgl has already loaded the current */
/* *p into %eax, but the cmovne then overwrites %eax with new, so the */
/* result equals old exactly when the store succeeded; "actual old */
/* value" holds only on success.  Callers presumably just compare the */
/* result against old -- confirm. */
_exportfn(C(store_conditional))
        __(movl 12(%esp),%edx)  /* new */
        __(movl 8(%esp),%eax)   /* old */
        __(movl 4(%esp),%ecx)   /* ptr */
        __(lock)
        __(cmpxchgl %edx,(%ecx)) /* if (*ptr == %eax) {*ptr = %edx; ZF=1} */
        __(cmovne %edx,%eax)    /* failed: return new instead of old */
        __(ret)
_endfn
67
/*      Atomically store val in *p; return the previous value */
/*      of *p. */
/* signed_natural atomic_swap(signed_natural *p, signed_natural val) */
_exportfn(C(atomic_swap))
        __(movl 8(%esp),%eax)   /* val */
        __(movl 4(%esp),%edx)   /* p */
        __(lock)                /* redundant: xchg with memory is implicitly locked */
        __(xchg %eax,(%edx))    /* %eax = old *p, *p = val, atomically */
        __(ret)
_endfn
78
/*      Logior the value in *p with mask (presumably a */
/*      bitmask with exactly 1 bit set.)  Return non-zero if any of */
/*      the bits in that bitmask were already set. */
/* natural atomic_ior(natural *p, natural mask) */
_exportfn(C(atomic_ior))
        __(movl 4(%esp),%edx)   /* ptr */
0:      __(movl (%edx),%eax)    /* %eax = current *ptr */
        __(movl %eax,%ecx)
        __(orl 8(%esp),%ecx)    /* %ecx = *ptr | mask */
        __(lock)
        __(cmpxchg %ecx,(%edx)) /* install; fails (ZF=0) if *ptr changed */
        __(jnz 0b)              /* raced with another writer: retry */
        __(andl 8(%esp),%eax)   /* old value & mask = bits already set */
        __(ret)
_endfn
94       
95       
/* Logand the value in *p with mask (presumably a bitmask with exactly 1 */
/* bit set.)  Return the value now in *p (for some value of "now"). */
/* natural atomic_and(natural *p, natural mask) */
_exportfn(C(atomic_and))
        __(movl 4(%esp),%edx)   /* ptr */
0:      __(movl (%edx),%eax)    /* %eax = current *ptr */
        __(movl %eax,%ecx)
        __(and 8(%esp),%ecx)    /* %ecx = *ptr & mask */
        __(lock)
        __(cmpxchg %ecx,(%edx)) /* install; fails (ZF=0) if *ptr changed */
        __(jnz 0b)              /* raced with another writer: retry */
        __(movl %ecx,%eax)      /* return the value we just stored */
        __(ret)
_endfn
110
111
        __ifdef([DARWIN])
/* Executing hlt from user mode raises an exception; presumably the */
/* lisp kernel's Mach exception handler recognizes this PC and */
/* performs the equivalent of sigreturn -- TODO confirm against the */
/* Darwin exception-handling code. */
_exportfn(C(pseudo_sigreturn))
        __(hlt)                 /* privileged insn: traps to the handler */
        __(jmp C(pseudo_sigreturn)) /* loop if we somehow resume here */
_endfn
        __endif
118
/* int cpuid (int code, int *pebx, int *pecx, int *pedx)  */
/* Execute the CPUID instruction for leaf `code' (subleaf 0); return */
/* %eax and store %ebx/%ecx/%edx through the supplied pointers. */
_exportfn(C(cpuid))
        __(push %ebx)           /* %ebx is non-volatile */
        __(push %esi)           /* ditto here */
        /* Two pushes above: incoming args now sit 8 bytes further out. */
        __(movl 12(%esp),%eax)  /* code */
        __(xorl %ecx,%ecx)      /* subleaf 0 (some leaves read %ecx) */
        __(cpuid)
        __(movl 16(%esp),%esi)  /* pebx */
        __(movl %ebx,(%esi))
        __(movl 20(%esp),%esi)  /* pecx */
        __(movl %ecx,(%esi))
        __(movl 24(%esp),%esi)  /* pedx */
        __(movl %edx,(%esi))
        __(pop %esi)
        __(pop %ebx)
        __(ret)
_endfn
136
/* switch_to_foreign_stack(new_sp, func, arg_0, arg_1, arg_2)  */
/*   Not fully general, but should get us off of the signal stack */
/* Beware: on Darwin, GDB can get very confused by this code, and
   doesn't really get unconfused until the target function - the
   handler - has built its stack frame
   The lone caller of this function passes 3 arguments (besides
   the new stack pointer and the handler address.)
   On platforms where the C stack must be 16-byte aligned, pushing
   a 4th word helps make the stack aligned before the return
   address is (re-)pushed.
   On Linux, there are severe constraints on what the top of stack
   can look like when rt_sigreturn (the code at the return address)
   runs, and there aren't any constraints on stack alignment, so
   we don't push the extra word on the new stack.*/
_exportfn(C(switch_to_foreign_stack))
        __(addl $4,%esp)        /* discard return address, on wrong stack */
        __(pop %edi)            /* new esp */
        __(pop %esi)            /* handler */
        __(pop %eax)            /* arg_0 */
        __(pop %ebx)            /* arg_1 */
        __(pop %ecx)            /* arg_2 */
        __(mov %edi,%esp)       /* switch to the new stack */
        __(pop %edi)            /* Return address pushed by caller */
        __ifndef([LINUX])
        __(push $0)             /* For alignment. See comment above */
        __endif
        __(push %ecx)           /* arg_2 */
        __(push %ebx)           /* arg_1 */
        __(push %eax)           /* arg_0 */
        __(push %edi)           /* return address */
        __(jmp *%esi)           /* On some platforms, we don't really return */
_endfn
169
/* Invoke the FreeBSD sigreturn(2) system call to restore the */
/* context saved at signal delivery.  Does not return on success. */
_exportfn(C(freebsd_sigreturn))
        __(movl $417,%eax)      /* SYS_sigreturn */
        __(int $0x80)           /* FreeBSD/i386 syscall gate is vector 0x80
                                   ($80 was decimal 80 = vector 0x50: wrong) */
_endfn
174               
/* Stub: no vector-register state is captured on this platform. */
_exportfn(C(get_vector_registers))
        __(ret)
_endfn
178
/* Stub: no vector-register state is restored on this platform. */
_exportfn(C(put_vector_registers))
        __(ret)
_endfn
182
        __ifdef([WIN_32])
/* restore_windows_context(context, tcr, old_valence) */
/* Restore thread state from a Windows CONTEXT record and resume at */
/* its saved Eip.  Does not return to the caller.  The X...start/end */
/* labels bracket this code so other parts of the kernel can */
/* recognize a PC inside it. */
_exportfn(C(restore_windows_context))
Xrestore_windows_context_start:
        __(movl 12(%esp),%edx)  /* old valence */
        __(movl 8(%esp),%eax)   /* tcr */
        __(movw tcr.ldt_selector(%eax), %rcontext_reg) /* reload TCR segment reg */
        __(movl 4(%esp),%ecx)   /* context */
        __(movl %edx,rcontext(tcr.valence))
        __(movl $0,rcontext(tcr.pending_exception_context))
        __(frstor win32_context.FloatSave(%ecx)) /* x87 FP state */
        /* Windows doesn't bother to align the context, so use
          'movupd' here */
        __(movupd win32_context.Xmm0(%ecx),%xmm0)
        __(movupd win32_context.Xmm1(%ecx),%xmm1)
        __(movupd win32_context.Xmm2(%ecx),%xmm2)
        __(movupd win32_context.Xmm3(%ecx),%xmm3)
        __(movupd win32_context.Xmm4(%ecx),%xmm4)
        __(movupd win32_context.Xmm5(%ecx),%xmm5)
        __(movupd win32_context.Xmm6(%ecx),%xmm6)
        __(movupd win32_context.Xmm7(%ecx),%xmm7)
        __(ldmxcsr win32_context.MXCSR(%ecx))
        __(movl win32_context.Ebp(%ecx),%ebp)
        __(movl win32_context.Edi(%ecx),%edi)
        __(movl win32_context.Esi(%ecx),%esi)
        __(movl win32_context.Edx(%ecx),%edx)
        __(movl win32_context.Ebx(%ecx),%ebx)
        __(movl win32_context.Eax(%ecx),%eax)
        __(movl win32_context.Esp(%ecx),%esp) /* switch to the context's stack */
        __(pushl win32_context.Eip(%ecx))     /* resume PC, consumed by ret below */
Xrestore_windows_context_load_rcx:
        __(movl win32_context.Ecx(%ecx),%ecx) /* %ecx last: it held the context ptr */
Xrestore_windows_context_iret:
        /* NOTE(review): label says "iret" but this is a plain ret that */
        /* jumps to the pushed Eip; presumably the name is historical -- */
        /* confirm against the code that uses the exported pointer. */
        __(ret)
Xrestore_windows_context_end:
        __(nop)
_endfn
219       
/* windows_switch_to_foreign_stack(new_sp, handler, arg) */
/* Switch to a new stack and tail-jump to handler with one argument. */
_exportfn(C(windows_switch_to_foreign_stack))
        __(pop %eax)            /* our return address */
        __(pop %ebx)            /* new %esp */
        __(pop %ecx)            /* handler */
        __(pop %edx)            /* arg */
        __(movl %ebx,%esp)      /* switch stacks */
        __(subl $0x10,%esp)     /* 16 bytes of outgoing-arg space; presumably
                                   also keeps the stack aligned -- confirm */
        __(movl %edx,(%esp))    /* arg becomes the handler's first argument */
        __(push %eax)           /* re-push return address on the new stack */
        __(jmp *%ecx)
_endfn
231
        .data
/* Exported data words holding the addresses of key points inside */
/* restore_windows_context, presumably so C code can tell whether a */
/* thread was interrupted within it -- confirm against the kernel's */
/* Windows exception handler. */
        .globl C(restore_windows_context_start)
        .globl C(restore_windows_context_end)
        .globl C(restore_windows_context_load_rcx)
        .globl C(restore_windows_context_iret)
C(restore_windows_context_start):  .long Xrestore_windows_context_start
C(restore_windows_context_end): .long Xrestore_windows_context_end
C(restore_windows_context_load_rcx):  .long Xrestore_windows_context_load_rcx
C(restore_windows_context_iret): .long Xrestore_windows_context_iret
        .text
242       
        __ifdef([WIN32_ES_HACK])
/* Something that we shouldn't return to */
_exportfn(C(windows_halt))
        __(hlt)                 /* privileged: traps if ever executed */
_endfn
        __endif
/* Make x86 string instructions safe to use: clear the direction */
/* flag, and (under WIN32_ES_HACK) repair %es if it doesn't match */
/* %ds, since movs/stos write through %es. */
_exportfn(C(ensure_safe_for_string_operations))
        __ifdef([WIN32_ES_HACK])
        __(movw %es,%ax)
        __(movw %ds,%dx)
        __(cmpw %ax,%dx)
        __(jne 9f)              /* mismatch: trap below, then repair %es */
0:      __(movw %dx,%es)        /* %es := %ds (harmless when already equal) */
        __endif
        __(cld)                 /* string ops must move forward */
        __(ret)
        __ifdef([WIN32_ES_HACK])
9:      __(hlt)                 /* NOTE(review): deliberate trap on %es/%ds
                                   mismatch before repairing? -- confirm */
        __(jmp 0b)
        __endif
_endfn
        __endif
        _endfile
266
Note: See TracBrowser for help on using the repository browser.