source: trunk/source/lisp-kernel/x86-asmutils64.s @ 15132

Last change on this file since 15132 was 15132, checked in by gb, 10 years ago

Refer to the external symbol 'exp', so that libm isn't discarded
by the linker.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 8.0 KB
Line 
1/*   Copyright (C) 2005-2009 Clozure Associates */
2/*   This file is part of Clozure CL.   */
3 
4/*   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public */
5/*   License , known as the LLGPL and distributed with Clozure CL as the */
6/*   file "LICENSE".  The LLGPL consists of a preamble and the LGPL, */
7/*   which is distributed with Clozure CL as the file "LGPL".  Where these */
8/*   conflict, the preamble takes precedence.   */
9 
10/*   Clozure CL is referenced in the preamble as the "LIBRARY." */
11 
12/*   The LLGPL is also available online at */
13/*   http://opensource.franz.com/preamble.html */
14
15
16       
17
18        include(lisp.s)
19
20        _beginfile
21
/* Flush %carg1 cache lines, starting at address in %carg0.  Each line is */
/*   assumed to be %carg2 bytes wide. */
/* Bottom-tested loop: the initial cmpq sets flags for the jg at 2: so */
/* that a zero count flushes nothing. */
/* NOTE(review): no serializing fence (mfence/sfence) after the clflush */
/* loop -- confirm callers don't require ordering guarantees. */
_exportfn(C(flush_cache_lines))
        __(cmpq $0,%carg1)              /* set flags for first pass of jg below */
        __(jmp 2f)                      /* enter at the loop test */
1:      __(clflush (%carg0))            /* flush line containing %carg0 */
        __(addq %carg2,%carg0)          /* advance one line width */
        __(subq $1,%carg1)              /* count down; sets flags for jg */
2:      __(jg 1b)                       /* loop while count > 0 */
        __(repret)                      /* "rep ret" style return (see lisp.s) */
_endfn
33
/* Return the current C stack pointer.  Note that %rsp here still */
/* points at our own return address. */
_exportfn(C(current_stack_pointer))
        __(movq %rsp,%cret)
        __(ret)
_endfn
38
/* Touch the page at %carg0: store a nonzero word there, then zero it */
/* back, and return 1.  C(touch_page_end) marks the end of this code, */
/* presumably so a fault handler can recognize faults raised inside */
/* touch_page and return 0 instead -- TODO confirm against the C */
/* exception-handling code. */
_exportfn(C(touch_page))
        __(movq %carg0,(%carg0))        /* provoke a fault if unmapped/read-only */
        __(movq $0,(%carg0))            /* restore the word to zero */
        __(movl $1,%cret_l)             /* success */
        .globl C(touch_page_end)
C(touch_page_end):     
        __(ret)
/* NOTE(review): no _endfn here, unlike sibling functions -- verify intent. */
46                       
/* Count leading zero bits in %carg0.  bsrq leaves its destination */
/* undefined when the source is 0, so the argument must be nonzero. */
_exportfn(C(count_leading_zeros))
        __(bsrq %carg0,%cret)           /* index of highest set bit (0..63) */
        __(xorq $63,%cret)              /* 63 - index = leading zero count */
        __(ret)
_endfn
52
/* Do nothing; a callable no-op. */
_exportfn(C(noop))
        __(retq)
_endfn
56
/* Load the MXCSR (SSE control/status register) from %carg0. */
_exportfn(C(set_mxcsr))
        __(pushq %carg0)                /* ldmxcsr needs a memory operand */
        __(ldmxcsr (%rsp))
        __(addq $8,%rsp)                /* discard the temporary */
        __(ret)
_endfn
63       
/* Return the current MXCSR value (low 32 bits; upper bits zeroed by */
/* the pushq $0). */
_exportfn(C(get_mxcsr))
        __(pushq $0)                    /* zero a stack slot for stmxcsr */
        __(stmxcsr (%rsp))              /* stores 32 bits into that slot */
        __(popq %cret)
        __(ret)
_endfn
70
/* Empty stub: contains no instructions (not even a ret), so it must */
/* never actually be called on x8664 -- TODO confirm no callers. */
_exportfn(C(save_fp_context))
_endfn
73       
/* Empty stub, like save_fp_context above: must not be called. */
_exportfn(C(restore_fp_context))
_endfn                       
76
/*  Atomically store new value (%carg2) in *%carg0, if old value == %carg1. */
/*  Return actual old value. */
_exportfn(C(store_conditional))
        __(mov %carg1,%cret)            /* cmpxchg compares %rax (= %cret) with memory */
        __(lock)
        __(cmpxchgq %carg2,(%carg0))    /* success: *%carg0 = %carg2, ZF set; */
                                        /* failure: %rax = current *%carg0, ZF clear */
        __(cmovne %carg2,%cret)         /* NOTE(review): on failure this returns the */
                                        /* proposed NEW value, not the "actual old */
                                        /* value" promised above; callers presumably */
                                        /* only test the result against %carg1 -- */
                                        /* TODO confirm. */
        __(ret)
_endfn
86
/*      Atomically store new_value(%carg1) in *%carg0 ;  return previous contents */
/*      of *%carg0. */

_exportfn(C(atomic_swap))
        __(lock)                        /* redundant: xchg with memory is implicitly locked */
        __(xchg %carg1,(%carg0))        /* %carg1 now holds the old contents */
        __(mov %carg1,%cret)
        __(ret)
_endfn
96
/*        Logior the value in *%carg0 with the value in %carg1 (presumably a */
/*      bitmask with exactly 1 bit set.)  Return non-zero if any of */
/*      the bits in that bitmask were already set. */
/* Standard load/modify/cmpxchg retry loop. */
_exportfn(C(atomic_ior))
0:      __(movq (%carg0),%cret)         /* %cret = observed old value */
        __(movq %cret,%carg2)
        __(orq %carg1,%carg2)           /* %carg2 = old | mask */
        __(lock)
        __(cmpxchg %carg2,(%carg0))     /* install iff *%carg0 still == old */
        __(jnz 0b)                      /* lost a race; retry */
        __(andq %carg1,%cret)           /* mask bits that were already set */
        __(ret)
_endfn
110       
111       
/* Logand the value in *carg0 with the value in carg1 (presumably a bitmask with exactly 1 */
/* bit set.)  Return the value now in *carg0 (for some value of "now" */
/* Same cmpxchg retry loop as atomic_ior above. */

_exportfn(C(atomic_and))
0:      __(movq (%carg0),%cret)         /* %cret = observed old value */
        __(movq %cret,%carg2)
        __(and %carg1,%carg2)           /* %carg2 = old & mask */
        __(lock)
        __(cmpxchg %carg2,(%carg0))     /* install iff *%carg0 still == old */
        __(jnz 0b)                      /* lost a race; retry */
        __(movq %carg2,%cret)           /* return the value we installed */
        __(ret)
_endfn
125
126
        __ifdef(`DARWIN')
/* Deliberately fault with hlt -- presumably intercepted by the */
/* exception-handling machinery, which recognizes this PC and performs */
/* a "pseudo" sigreturn -- TODO confirm against the Darwin handler code. */
_exportfn(C(pseudo_sigreturn))
        __(hlt)                         /* privileged instruction: always faults in user mode */
        __(jmp C(pseudo_sigreturn))     /* loop if execution somehow resumes */
_endfn
        __endif                       
133
/* int cpuid (natural code, natural *pebx, natural *pecx, natural *pedx)  */
/* Execute CPUID leaf 'code' (subleaf 0); store %rbx/%rcx/%rdx results */
/* through the pointer args and return %rax's result. */
_exportfn(C(cpuid))
        __(pushq %carg2)        /* save pecx: cpuid clobbers the arg registers */
        __(pushq %carg3)        /* save pedx */
        __(movq %carg1, %ctemp0)        /* pebx, kept in a temp across cpuid */
        __(pushq %rbx)          /* non-volatile reg, clobbered by CPUID */
        __(movq %carg0, %rax)   /* leaf number */
        __(xorq %rcx,%rcx)      /* subleaf 0 */
        __(cpuid)
        __(movq %rbx,(%ctemp0)) /* *pebx */
        __(popq %rbx)
        __(popq %ctemp0)           /* recover pedx */
        __(movq %rdx,(%ctemp0))
        __(popq %ctemp0)                /* recover pecx */
        __(movq %rcx,(%ctemp0))
        __(ret)
        /* Unrelated to cpuid: emit a reference to libm's exp() so the */
        /* linker doesn't discard libm (see the r15132 change log). */
        .globl C(exp)
        .quad C(exp)
_endfn
153
/* switch_to_foreign_stack(new_sp, func, arg_0, arg_1, arg_2, arg_3)  */
/*   Not fully general, but should get us off of the signal stack */
/* SysV AMD64 only: switch %rsp to new_sp, shift arg_0..arg_3 down into */
/* the first four argument registers, and tail-call func. */
        __ifndef(`WINDOWS')
_exportfn(C(switch_to_foreign_stack))
        __(movq %rdi,%rsp)              /* %rsp = new_sp */
        __(movq %rsi,%rax)              /* %rax = func */
        __(movq %rdx,%rdi)              /* arg_0 */
        __(movq %rcx,%rsi)              /* arg_1 */
        __(movq %r8,%rdx)               /* arg_2 */
        __(movq %r9,%rcx)               /* arg_3 */
        __(jmp *%rax)                   /* tail-call func on the new stack */
_endfn
        __endif
167       
/* Invoke FreeBSD's sigreturn(2) to restore the signal context on the */
/* stack; does not return on success. */
_exportfn(C(freebsd_sigreturn))
        __(movl $417,%eax)      /* SYS_sigreturn */
        __(syscall)
_endfn
171       
/* Empty stub (no instructions, no ret): must not be called on x8664. */
_exportfn(C(get_vector_registers))
_endfn
174
/* Empty stub, like get_vector_registers above: must not be called. */
_exportfn(C(put_vector_registers))
_endfn                         
177       
        __ifdef(`DARWIN')
_exportfn(C(darwin_sigreturn))
        .globl C(sigreturn)
       
/* Need to set the sigreturn 'infostyle' argument, which is mostly
   undocumented.  On x8664 Darwin, sigtramp() sets it to 0x1e, and
   since we're trying to do what sigtramp() would do if we'd returned
   to it ... */
        __(movl $0x1e,%esi)             /* infostyle (second syscall arg) */
        __(movl $0x20000b8,%eax)        /* syscall class 2 (BSD) | 0xb8 (184) -- */
                                        /* presumably SYS_sigreturn; TODO confirm */
        __(syscall)
        __(ret)                         /* not reached if sigreturn succeeds */
_endfn
        __endif
192       
        __ifdef(`WIN_64')
/* %rcx = CONTEXT, %rdx = tcr, %r8 = old_valence.  This pretty
   much has to be uninterruptible */       
/* Restore a thread's full register state from a Win64 CONTEXT record */
/* and resume it via iretq.  The exported ..._start/_iret/_end pointers */
/* below let other code tell whether a thread is executing in here. */
_exportfn(C(restore_windows_context))
Xrestore_windows_context_start:         
        __(subq $0x38,%rsp)             /* room for the 5-qword iretq frame */
        __(xorl %eax,%eax)
        __(movq %r8,tcr.valence(%rdx))  /* back to the pre-exception valence */
        __(movq %rax,tcr.pending_exception_context(%rdx))       /* clear pending context */
        __(fxrstor win64_context.fpstate(%rcx))                 /* x87/MMX/SSE state */
        /* Restore all sixteen xmm registers from the CONTEXT record. */
        __(movapd win64_context.Xmm0(%rcx),%xmm0)
        __(movapd win64_context.Xmm1(%rcx),%xmm1)
        __(movapd win64_context.Xmm2(%rcx),%xmm2)
        __(movapd win64_context.Xmm3(%rcx),%xmm3)
        __(movapd win64_context.Xmm4(%rcx),%xmm4)
        __(movapd win64_context.Xmm5(%rcx),%xmm5)
        __(movapd win64_context.Xmm6(%rcx),%xmm6)
        __(movapd win64_context.Xmm7(%rcx),%xmm7)
        __(movapd win64_context.Xmm8(%rcx),%xmm8)
        __(movapd win64_context.Xmm9(%rcx),%xmm9)
        __(movapd win64_context.Xmm10(%rcx),%xmm10)
        __(movapd win64_context.Xmm11(%rcx),%xmm11)
        __(movapd win64_context.Xmm12(%rcx),%xmm12)
        __(movapd win64_context.Xmm13(%rcx),%xmm13)
        __(movapd win64_context.Xmm14(%rcx),%xmm14)
        __(movapd win64_context.Xmm15(%rcx),%xmm15)
        __(ldmxcsr win64_context.MxCsr(%rcx))
        /* Build the iretq frame: SS, RSP, EFLAGS, CS, RIP (RIP at the top). */
        __(movw win64_context.SegSs(%rcx),%ax)
        __(movw %ax,0x20(%rsp))                 /* SS */
        __(movq win64_context.Rsp(%rcx),%rax)
        __(movq %rax,0x18(%rsp))                /* RSP */
        __(movl win64_context.EFlags(%rcx),%eax)
        __(movl %eax,0x10(%rsp))                /* EFLAGS */
        __(movw win64_context.SegCs(%rcx),%ax)
        __(movw %ax,8(%rsp))                    /* CS */
        __(movq win64_context.Rip(%rcx),%rax)
        __(movq %rax,(%rsp))                    /* RIP */
        /* Restore general-purpose registers (all but %rcx and %rsp). */
        __(movq win64_context.Rax(%rcx),%rax)
        __(movq win64_context.Rbx(%rcx),%rbx)
        __(movq win64_context.Rdx(%rcx),%rdx)
        __(movq win64_context.Rdi(%rcx),%rdi)
        __(movq win64_context.Rsi(%rcx),%rsi)
        __(movq win64_context.Rbp(%rcx),%rbp)
        __(movq win64_context.R8(%rcx),%r8)
        __(movq win64_context.R9(%rcx),%r9)
        __(movq win64_context.R10(%rcx),%r10)
        __(movq win64_context.R11(%rcx),%r11)
        __(movq win64_context.R12(%rcx),%r12)
        __(movq win64_context.R13(%rcx),%r13)
        __(movq win64_context.R14(%rcx),%r14)
        __(movq win64_context.R15(%rcx),%r15)
        /* This must be the last thing before the iret, e.g., if we're
        interrupted before the iret, the context we're returning to here
        is still in %rcx.  If we're interrupted -at- the iret, then
        everything but that which the iret will restore has been restored. */
        __(movq win64_context.Rcx(%rcx),%rcx)
Xrestore_windows_context_iret:           
        __(iretq)
Xrestore_windows_context_end:             
        __(nop)
_endfn
254       
/* windows_switch_to_foreign_stack(new_sp (%rcx), func (%rdx), arg (%r8)): */
/* switch to new_sp, leaving the Win64-mandated 0x20 bytes of shadow */
/* space, and tail-call func(arg). */
_exportfn(C(windows_switch_to_foreign_stack))
        __(pop %rax)                    /* our return address */
        __(lea -0x20(%rcx),%rsp)        /* new stack, below the shadow space */
        __(push %rax)                   /* re-push return address on new stack */
        __(movq %r8,%rcx)               /* arg becomes func's first argument */
        __(jmp *%rdx)                   /* tail-call func */
_endfn       
262
        .data
/* C-visible pointers to the start/end/iret points of */
/* restore_windows_context above, presumably so the C exception code */
/* can tell whether a thread was interrupted inside it -- TODO confirm. */
        .globl C(restore_windows_context_start)
        .globl C(restore_windows_context_end)
        .globl C(restore_windows_context_iret)
C(restore_windows_context_start):  .quad Xrestore_windows_context_start
C(restore_windows_context_end): .quad Xrestore_windows_context_end
C(restore_windows_context_iret): .quad Xrestore_windows_context_iret
        .text
271
/* Something that we shouldn't return to */
/* hlt is privileged and faults in user mode, so landing here traps. */
_exportfn(C(windows_halt))
        __(hlt)
_endfn         
/* Clear the direction flag so string instructions ascend, as compiled */
/* code expects (DF=0 is the ABI-mandated state). */
_exportfn(C(ensure_safe_for_string_operations))
        __(cld)
        __(ret)
_endfn                                       
280        __endif
281        _endfile
Note: See TracBrowser for help on using the repository browser.