source: trunk/source/lisp-kernel/x86-asmutils64.s @ 14993

Last change on this file since 14993 was 14993, checked in by rme, 10 years ago

Remove DARWIN_GS_HACK code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 7.9 KB
Line 
1/*   Copyright (C) 2005-2009 Clozure Associates */
2/*   This file is part of Clozure CL.   */
3 
4/*   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public */
5/*   License , known as the LLGPL and distributed with Clozure CL as the */
6/*   file "LICENSE".  The LLGPL consists of a preamble and the LGPL, */
7/*   which is distributed with Clozure CL as the file "LGPL".  Where these */
8/*   conflict, the preamble takes precedence.   */
9 
10/*   Clozure CL is referenced in the preamble as the "LIBRARY." */
11 
12/*   The LLGPL is also available online at */
13/*   http://opensource.franz.com/preamble.html */
14
15
16       
17
18        include(lisp.s)
19
20        _beginfile
21
/* Flush %carg1 cache lines, starting at address in %carg0.  Each line is */
/*   assumed to be %carg2 bytes wide. */
_exportfn(C(flush_cache_lines))
        /* Prime the flags for the loop test: cmpq computes carg1 - 0, so
           the jg at 2: sees "count > 0" on the first pass; on later passes
           the jg consumes the flags left by the subq below. */
        __(cmpq $0,%carg1)
        __(jmp 2f)
1:      __(clflush (%carg0))            /* flush line containing (%carg0) */
        __(addq %carg2,%carg0)          /* advance by one line width */
        __(subq $1,%carg1)              /* decrement count, setting flags for jg */
2:      __(jg 1b)
        /* repret: macro from lisp.s -- presumably the two-byte "rep ret"
           idiom that avoids a branch-predictor penalty on older AMD parts.
           NOTE(review): no fence is issued here; any required ordering of
           the clflushes is the caller's responsibility. */
        __(repret)
_endfn
33
_exportfn(C(current_stack_pointer))
        /* Return the stack pointer as it stands on entry, i.e. the address
           of our own return address.  leaq computes the same value a plain
           movq %rsp would, and likewise leaves the flags untouched. */
        __(leaq (%rsp),%cret)
        __(ret)
_endfn
38
/* Probe the page at %carg0 for writability: return 1 if the writes
   succeed.  If a write faults, the kernel's fault handler is expected to
   recognize a fault PC inside this function (the exported touch_page_end
   label delimits it) and resume past it, presumably with %cret forced
   to 0 -- NOTE(review): that protocol lives in the C code; confirm there. */
_exportfn(C(touch_page))
        __(movq %carg0,(%carg0))        /* provoke a fault if the page is unwritable */
        __(movq $0,(%carg0))            /* leave the probed word zeroed */
        __(movl $1,%cret_l)             /* success */
        .globl C(touch_page_end)
C(touch_page_end):
        __(ret)
_endfn
46                       
_exportfn(C(count_leading_zeros))
        /* %cret = number of leading zero bits in %carg0.
           bsrq yields the index (0..63) of the highest set bit; for x in
           that range, x ^ 63 == 63 - x, which is the leading-zero count.
           NOTE(review): bsr leaves its destination undefined when the
           source is 0, so callers must not pass 0. */
        __(bsrq %carg0,%cret)
        __(xorq $63,%cret)
        __(ret)
_endfn
52
_exportfn(C(noop))
        /* Callable placeholder: does nothing and returns immediately.
           (ret and retq assemble to the same one-byte near return.) */
        __(ret)
_endfn
56
_exportfn(C(set_mxcsr))
        /* Load the SSE control/status register (MXCSR) from the low 32
           bits of %carg0.  ldmxcsr takes only a memory operand, so bounce
           the value through the stack.  (No red-zone trick here: this
           file is also built for Windows, which has no red zone.) */
        __(pushq %carg0)
        __(ldmxcsr (%rsp))
        __(addq $8,%rsp)
        __(ret)
_endfn
63       
_exportfn(C(get_mxcsr))
        /* Return the SSE control/status register (MXCSR).  stmxcsr stores
           only 32 bits, so push a zero first so the full 64-bit return
           value is well-defined. */
        __(pushq $0)
        __(stmxcsr (%rsp))
        __(popq %cret)
        __(ret)
_endfn
70
/* Stub: empty in the original source, so a call would have fallen
   through into the code that follows.  Return immediately instead;
   no FP context is saved here on x8664. */
_exportfn(C(save_fp_context))
        __(ret)
_endfn
73       
/* Stub: empty in the original source, so a call would have fallen
   through into the code that follows.  Return immediately instead;
   no FP context is restored here on x8664. */
_exportfn(C(restore_fp_context))
        __(ret)
_endfn
76
/*  Atomically store new value (%carg2) in *%carg0, if old value == %carg1. */
/*  Return actual old value. */
_exportfn(C(store_conditional))
        /* cmpxchg compares %rax (seeded with %carg1) against *%carg0.
           On success %rax still holds the comparand (== the old value);
           on failure cmpxchg itself loads the current memory value into
           %rax.  Either way %cret already holds the "actual old value"
           the contract promises, so no fixup is needed.
           (The original followed the cmpxchg with `cmovne %carg2,%cret`,
           which on failure overwrote the actual old value with the NEW
           value -- contradicting the contract, and making a failed CAS
           look like a success whenever %carg1 == %carg2.) */
        __(mov %carg1,%cret)
        __(lock)
        __(cmpxchgq %carg2,(%carg0))
        __(ret)
_endfn
86
/*      Atomically store new_value(%carg1) in *%carg0 ;  return previous contents */
/*      of *%carg0. */

_exportfn(C(atomic_swap))
        /* xchg with a memory operand is implicitly locked, so the
           explicit lock prefix is redundant but harmless.  After the
           exchange %carg1 holds the previous contents of *%carg0. */
        __(lock)
        __(xchg %carg1,(%carg0))
        __(mov %carg1,%cret)
        __(ret)
_endfn
96
/*        Logior the value in *%carg0 with the value in %carg1 (presumably a */
/*      bitmask with exactly 1 bit set.)  Return non-zero if any of */
/*      the bits in that bitmask were already set. */
_exportfn(C(atomic_ior))
        /* Standard CAS loop: snapshot *%carg0 into %cret (== %rax, the
           cmpxchg comparand), compute snapshot|mask in %carg2 (clobbered
           as scratch), and retry until cmpxchg installs it atomically. */
0:      __(movq (%carg0),%cret)
        __(movq %cret,%carg2)
        __(orq %carg1,%carg2)
        __(lock)
        __(cmpxchg %carg2,(%carg0))
        __(jnz 0b)                      /* lost a race: reload and retry */
        __(andq %carg1,%cret)           /* old & mask: non-zero iff bits were set */
        __(ret)
_endfn
110       
111       
/* Logand the value in *carg0 with the value in carg1 (presumably a bitmask with exactly 1 */
/* bit set.)  Return the value now in *carg0 (for some value of "now" */

_exportfn(C(atomic_and))
        /* CAS loop, same shape as atomic_ior.  Returns the value this
           call stored (old & mask); it may already be stale by the time
           the caller looks at it -- hence "for some value of now". */
0:      __(movq (%carg0),%cret)
        __(movq %cret,%carg2)
        __(and %carg1,%carg2)
        __(lock)
        __(cmpxchg %carg2,(%carg0))
        __(jnz 0b)                      /* lost a race: reload and retry */
        __(movq %carg2,%cret)           /* return the value we stored */
        __(ret)
_endfn
125
126
        __ifdef(`DARWIN')
/* NOTE(review): not expected to execute normally -- hlt is privileged
   and faults in user mode, presumably so the kernel's exception handler
   can recognize this PC and perform the real signal return; confirm
   against the Darwin exception-handling C code. */
_exportfn(C(pseudo_sigreturn))
        __(hlt)
        __(jmp C(pseudo_sigreturn))     /* spin if execution somehow resumes */
_endfn
        __endif
133
/* int cpuid (natural code, natural *pebx, natural *pecx, natural *pedx)  */
_exportfn(C(cpuid))
        /* Execute CPUID leaf %carg0; return eax, and store ebx/ecx/edx
           through the supplied pointers.  The pecx/pedx out-pointers are
           parked on the stack because cpuid clobbers rax/rbx/rcx/rdx. */
        __(pushq %carg2)                /* save pecx */
        __(pushq %carg3)                /* save pedx */
        __(movq %carg1, %ctemp0)        /* pebx into a cpuid-safe temp */
        __(pushq %rbx)          /* non-volatile reg, clobbered by CPUID */
        /* Copy the leaf number before zeroing rcx -- order matters where
           %carg0 is %rcx (Win64).  Some leaves take a subleaf in ecx;
           use subleaf 0. */
        __(movq %carg0, %rax)
        __(xorq %rcx,%rcx)
        __(cpuid)
        __(movq %rbx,(%ctemp0))         /* *pebx = ebx */
        __(popq %rbx)
        __(popq %ctemp0)           /* recover pedx */
        __(movq %rdx,(%ctemp0))         /* *pedx = edx */
        __(popq %ctemp0)                /* recover pecx */
        __(movq %rcx,(%ctemp0))         /* *pecx = ecx */
        __(ret)                         /* eax (zero-extended into rax) is returned */
_endfn
151
/* switch_to_foreign_stack(new_sp, func, arg_0, arg_1, arg_2, arg_3)  */
/*   Not fully general, but should get us off of the signal stack */
        __ifndef(`WINDOWS')
_exportfn(C(switch_to_foreign_stack))
        /* SysV incoming args: rdi=new_sp, rsi=func, rdx/rcx/r8/r9=arg_0..3.
           Install the new stack, then shuffle arg_0..arg_3 into the
           outgoing argument registers (rdi,rsi,rdx,rcx) and tail-jump to
           func.  The shuffle order is load-bearing: each source register
           is read before a later move overwrites it.
           NOTE(review): nothing is pushed on the new stack here, so
           func's apparent return address/alignment is whatever new_sp
           points at -- the caller is responsible for that. */
        __(movq %rdi,%rsp)
        __(movq %rsi,%rax)              /* func */
        __(movq %rdx,%rdi)              /* arg_0 */
        __(movq %rcx,%rsi)              /* arg_1 */
        __(movq %r8,%rdx)               /* arg_2 */
        __(movq %r9,%rcx)               /* arg_3 */
        __(jmp *%rax)
_endfn
        __endif
165       
/* Invoke the raw FreeBSD sigreturn(2) system call on the current
   signal context; on success it does not return. */
_exportfn(C(freebsd_sigreturn))
        __(movl $417,%eax)      /* SYS_sigreturn */
        __(syscall)
_endfn
/* Stub: empty in the original source, so a call would have fallen
   through into the code that follows.  Return immediately instead. */
_exportfn(C(get_vector_registers))
        __(ret)
_endfn
172
/* Stub: empty in the original source, so a call would have fallen
   through into the code that follows.  Return immediately instead. */
_exportfn(C(put_vector_registers))
        __(ret)
_endfn
175       
        __ifdef(`DARWIN')
_exportfn(C(darwin_sigreturn))
        .globl C(sigreturn)
/* Need to set the sigreturn 'infostyle' argument, which is mostly
   undocumented.  On x8664 Darwin, sigtramp() sets it to 0x1e, and
   since we're trying to do what sigtramp() would do if we'd returned
   to it ... */
        __(movl $0x1e,%esi)             /* second arg: infostyle = 0x1e */
        /* 0x20000b8 = Darwin BSD syscall class (0x2000000) | SYS_sigreturn (184) */
        __(movl $0x20000b8,%eax)
        __(syscall)
        __(ret)                         /* not normally reached */
_endfn
        __endif
189       
        __ifdef(`WIN_64')
/* %rcx = CONTEXT, %rdx = tcr, %r8 = old_valence.  This pretty
   much has to be uninterruptible */
_exportfn(C(restore_windows_context))
Xrestore_windows_context_start:
        /* Reserve room on the current stack for the 5-qword iretq frame
           (SS, RSP, RFLAGS, CS, RIP) assembled below; 0x38 leaves slack. */
        __(subq $0x38,%rsp)
        __(xorl %eax,%eax)
        /* Record the restored valence and clear the pending exception
           context in the tcr before touching machine state. */
        __(movq %r8,tcr.valence(%rdx))
        __(movq %rax,tcr.pending_exception_context(%rdx))
        /* Restore x87/SSE state, then each xmm register from the CONTEXT. */
        __(fxrstor win64_context.fpstate(%rcx))
        __(movapd win64_context.Xmm0(%rcx),%xmm0)
        __(movapd win64_context.Xmm1(%rcx),%xmm1)
        __(movapd win64_context.Xmm2(%rcx),%xmm2)
        __(movapd win64_context.Xmm3(%rcx),%xmm3)
        __(movapd win64_context.Xmm4(%rcx),%xmm4)
        __(movapd win64_context.Xmm5(%rcx),%xmm5)
        __(movapd win64_context.Xmm6(%rcx),%xmm6)
        __(movapd win64_context.Xmm7(%rcx),%xmm7)
        __(movapd win64_context.Xmm8(%rcx),%xmm8)
        __(movapd win64_context.Xmm9(%rcx),%xmm9)
        __(movapd win64_context.Xmm10(%rcx),%xmm10)
        __(movapd win64_context.Xmm11(%rcx),%xmm11)
        __(movapd win64_context.Xmm12(%rcx),%xmm12)
        __(movapd win64_context.Xmm13(%rcx),%xmm13)
        __(movapd win64_context.Xmm14(%rcx),%xmm14)
        __(movapd win64_context.Xmm15(%rcx),%xmm15)
        __(ldmxcsr win64_context.MxCsr(%rcx))
        /* Build the iretq frame: SS at 0x20(%rsp), RSP at 0x18, RFLAGS at
           0x10, CS at 8, RIP at 0 -- the layout iretq pops in 64-bit mode. */
        __(movw win64_context.SegSs(%rcx),%ax)
        __(movw %ax,0x20(%rsp))
        __(movq win64_context.Rsp(%rcx),%rax)
        __(movq %rax,0x18(%rsp))
        __(movl win64_context.EFlags(%rcx),%eax)
        __(movl %eax,0x10(%rsp))
        __(movw win64_context.SegCs(%rcx),%ax)
        __(movw %ax,8(%rsp))
        __(movq win64_context.Rip(%rcx),%rax)
        __(movq %rax,(%rsp))
        /* General-purpose registers; rsp/rip/flags come from the iretq
           frame, and rcx (the CONTEXT pointer itself) is restored last. */
        __(movq win64_context.Rax(%rcx),%rax)
        __(movq win64_context.Rbx(%rcx),%rbx)
        __(movq win64_context.Rdx(%rcx),%rdx)
        __(movq win64_context.Rdi(%rcx),%rdi)
        __(movq win64_context.Rsi(%rcx),%rsi)
        __(movq win64_context.Rbp(%rcx),%rbp)
        __(movq win64_context.R8(%rcx),%r8)
        __(movq win64_context.R9(%rcx),%r9)
        __(movq win64_context.R10(%rcx),%r10)
        __(movq win64_context.R11(%rcx),%r11)
        __(movq win64_context.R12(%rcx),%r12)
        __(movq win64_context.R13(%rcx),%r13)
        __(movq win64_context.R14(%rcx),%r14)
        __(movq win64_context.R15(%rcx),%r15)
        /* This must be the last thing before the iret, e.g., if we're
        interrupted before the iret, the context we're returning to here
        is still in %rcx.  If we're interrupted -at- the iret, then
        everything but that which the iret will restore has been restored. */
        __(movq win64_context.Rcx(%rcx),%rcx)
Xrestore_windows_context_iret:
        __(iretq)
Xrestore_windows_context_end:
        __(nop)
_endfn
251       
_exportfn(C(windows_switch_to_foreign_stack))
        /* %rcx = new stack pointer, %rdx = function, %r8 = its argument.
           Move our return address onto the new stack, leaving 0x20 bytes
           below it -- the shadow/home space the Win64 ABI requires a
           caller to reserve -- then tail-jump to the function with its
           argument in %rcx. */
        __(pop %rax)                    /* our return address */
        __(lea -0x20(%rcx),%rsp)        /* new stack, minus shadow space */
        __(push %rax)
        __(movq %r8,%rcx)               /* first (only) argument */
        __(jmp *%rdx)
_endfn
259
        .data
        /* C-visible pointers delimiting the restore sequence above,
           presumably so C code can test whether an interrupted thread's
           PC lies inside it (see the uninterruptibility comment in
           restore_windows_context). */
        .globl C(restore_windows_context_start)
        .globl C(restore_windows_context_end)
        .globl C(restore_windows_context_iret)
C(restore_windows_context_start):  .quad Xrestore_windows_context_start
C(restore_windows_context_end): .quad Xrestore_windows_context_end
C(restore_windows_context_iret): .quad Xrestore_windows_context_iret
        .text
268
/* Something that we shouldn't return to */
_exportfn(C(windows_halt))
        __(hlt)                 /* privileged: faults immediately in user mode */
_endfn
_exportfn(C(ensure_safe_for_string_operations))
        /* Clear the direction flag so rep movs/stos ascend, the state
           the ABI expects on function entry. */
        __(cld)
        __(ret)
_endfn
277        __endif
278        _endfile
Note: See TracBrowser for help on using the repository browser.