source: release/1.9/source/lisp-kernel/x86-asmutils64.s @ 15706

/*   Copyright (C) 2005-2009 Clozure Associates */
/*   This file is part of Clozure CL.   */

/*   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public */
/*   License , known as the LLGPL and distributed with Clozure CL as the */
/*   file "LICENSE".  The LLGPL consists of a preamble and the LGPL, */
/*   which is distributed with Clozure CL as the file "LGPL".  Where these */
/*   conflict, the preamble takes precedence.   */

/*   Clozure CL is referenced in the preamble as the "LIBRARY." */

/*   The LLGPL is also available online at */
/*   http://opensource.franz.com/preamble.html */



        include(lisp.s)

        _beginfile
/* Flush %carg1 cache lines, starting at address in %carg0.  Each line is */
/*   assumed to be %carg2 bytes wide. */
_exportfn(C(flush_cache_lines))
        __(cmpq $0,%carg1)
        __(jmp 2f)
1:      __(clflush (%carg0))
        __(addq %carg2,%carg0)
        __(subq $1,%carg1)
2:      __(jg 1b)
        __(repret)
_endfn
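/* The routine above clflushes %carg1 lines, advancing %carg2 bytes per */
/* iteration.  A hedged C-level sketch of the same loop (the function name */
/* and the SSE2 _mm_clflush intrinsic are illustrative assumptions, not */
/* part of this file):

   #include <emmintrin.h>

   static void flush_cache_lines_sketch(void *start, long nlines, long line_size)
   {
       char *p = (char *)start;
       while (nlines-- > 0) {
           _mm_clflush(p);        // flush the cache line containing p
           p += line_size;        // advance by one line width
       }
   }
*/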

_exportfn(C(current_stack_pointer))
        __(movq %rsp,%cret)
        __(ret)
_endfn

_exportfn(C(touch_page))
        __(movq %carg0,(%carg0))
        __(movq $0,(%carg0))
        __(movl $1,%cret_l)
        .globl C(touch_page_end)
C(touch_page_end):
        __(ret)

_exportfn(C(count_leading_zeros))
        __(bsrq %carg0,%cret)
        __(xorq $63,%cret)
        __(ret)
_endfn
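/* count_leading_zeros works because bsrq yields the index of the highest */
/* set bit and xor-ing an index in 0..63 with 63 gives 63 minus that index, */
/* i.e. the leading-zero count.  A hedged C sketch (the name is */
/* illustrative; like bsrq, the result for a zero argument is not defined):

   static long count_leading_zeros_sketch(unsigned long x)
   {
       long i = 63;
       while (i >= 0 && ((x >> i) & 1) == 0)
           i--;                   // find the index of the highest set bit
       return 63 - i;             // equals (i ^ 63) for i in 0..63, as above
   }
*/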

_exportfn(C(noop))
        __(retq)
_endfn

_exportfn(C(set_mxcsr))
        __(pushq %carg0)
        __(ldmxcsr (%rsp))
        __(addq $8,%rsp)
        __(ret)
_endfn

_exportfn(C(get_mxcsr))
        __(pushq $0)
        __(stmxcsr (%rsp))
        __(popq %cret)
        __(ret)
_endfn
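/* get_mxcsr/set_mxcsr expose the SSE control/status register to C callers. */
/* A hedged usage sketch (the prototypes and the 0x1f80 exception-mask */
/* constant, bits 7..12 of MXCSR, are assumptions for illustration only):

   extern unsigned long get_mxcsr(void);
   extern void set_mxcsr(unsigned long new_mxcsr);

   static void mask_all_sse_exceptions_sketch(void)
   {
       unsigned long mxcsr = get_mxcsr();
       mxcsr |= 0x1f80;           // set all six SSE exception-mask bits
       set_mxcsr(mxcsr);
   }
*/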

_exportfn(C(save_fp_context))
_endfn

_exportfn(C(restore_fp_context))
_endfn

/*  Atomically store new value (%carg2) in *%carg0, if old value == %carg1. */
/*  Return actual old value. */
_exportfn(C(store_conditional))
        __(mov %carg1,%cret)
        __(lock)
        __(cmpxchgq %carg2,(%carg0))
        __(cmovne %carg2,%cret)
        __(ret)
_endfn
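/* store_conditional is a compare-and-swap: *%carg0 is replaced with %carg2 */
/* only if it still holds %carg1.  A hedged C sketch of what the sequence */
/* above computes (the GCC/Clang atomic builtin is used purely for */
/* illustration; note that on failure the cmovne hands back the proposed */
/* new value rather than the value read from memory):

   static long store_conditional_sketch(long *p, long oldval, long newval)
   {
       long expected = oldval;
       if (__atomic_compare_exchange_n(p, &expected, newval, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
           return oldval;         // success: caller sees the value it expected
       else
           return newval;         // failure: mirrors the cmovne above
   }
*/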

/*      Atomically store new_value(%carg1) in *%carg0 ;  return previous contents */
/*      of *%carg0. */

_exportfn(C(atomic_swap))
        __(lock)
        __(xchg %carg1,(%carg0))
        __(mov %carg1,%cret)
        __(ret)
_endfn
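/* A hedged C-level sketch of atomic_swap (the builtin is a GCC/Clang */
/* illustration, not something this file depends on):

   static long atomic_swap_sketch(long *p, long newval)
   {
       // xchg with a memory operand is implicitly locked; the previous
       // contents come back in the register that supplied the new value.
       return __atomic_exchange_n(p, newval, __ATOMIC_SEQ_CST);
   }
*/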

/*        Logior the value in *%carg0 with the value in %carg1 (presumably a */
/*      bitmask with exactly 1 bit set.)  Return non-zero if any of */
/*      the bits in that bitmask were already set. */
_exportfn(C(atomic_ior))
0:      __(movq (%carg0),%cret)
        __(movq %cret,%carg2)
        __(orq %carg1,%carg2)
        __(lock)
        __(cmpxchg %carg2,(%carg0))
        __(jz 1f)
        __(pause)
        __(jmp 0b)
1:      __(andq %carg1,%cret)
        __(ret)
_endfn
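/* atomic_ior is a load / or / compare-exchange retry loop; the final andq */
/* isolates the mask bits that were already set before the update.  A */
/* hedged C sketch (the builtins are illustrative only):

   static long atomic_ior_sketch(long *p, long mask)
   {
       long old = __atomic_load_n(p, __ATOMIC_SEQ_CST);
       while (!__atomic_compare_exchange_n(p, &old, old | mask, 0,
                                           __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
           ;                      // on failure, old is refreshed; retry
       return old & mask;         // non-zero iff some mask bit was already set
   }
*/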


/* Logand the value in *carg0 with the value in carg1 (presumably a bitmask with exactly 1 */
/* bit set.)  Return the value now in *carg0 (for some value of "now"). */

_exportfn(C(atomic_and))
0:      __(movq (%carg0),%cret)
        __(movq %cret,%carg2)
        __(and %carg1,%carg2)
        __(lock)
        __(cmpxchg %carg2,(%carg0))
        __(jz 1f)
        __(pause)
        __(jmp 0b)
1:      __(movq %carg2,%cret)
        __(ret)
_endfn
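/* Same retry-loop shape as atomic_ior, but the caller gets back the value */
/* that was just stored.  A hedged C sketch (builtins illustrative only):

   static long atomic_and_sketch(long *p, long mask)
   {
       long old = __atomic_load_n(p, __ATOMIC_SEQ_CST);
       while (!__atomic_compare_exchange_n(p, &old, old & mask, 0,
                                           __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
           ;                      // on failure, old is refreshed; retry
       return old & mask;         // the value the routine just wrote
   }
*/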



/* int cpuid (natural code, natural *pebx, natural *pecx, natural *pedx)  */
_exportfn(C(cpuid))
        __(pushq %carg2)
        __(pushq %carg3)
        __(movq %carg1, %ctemp0)
        __(pushq %rbx)          /* non-volatile reg, clobbered by CPUID */
        __(movq %carg0, %rax)
        __(xorq %rcx,%rcx)
        __(cpuid)
        __(movq %rbx,(%ctemp0))
        __(popq %rbx)
        __(popq %ctemp0)           /* recover pedx */
        __(movq %rdx,(%ctemp0))
        __(popq %ctemp0)                /* recover pecx */
        __(movq %rcx,(%ctemp0))
        __(ret)
_endfn
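/* A hedged usage sketch for the cpuid wrapper above (the prototype is */
/* restated from the comment; the ebx/edx/ecx vendor-string layout of leaf 0 */
/* is standard x86 behavior, but the surrounding names are illustrative):

   #include <stdio.h>
   #include <string.h>

   extern int cpuid(unsigned long code, unsigned long *pebx,
                    unsigned long *pecx, unsigned long *pedx);

   static void print_cpu_vendor_sketch(void)
   {
       unsigned long ebx, ecx, edx;
       char vendor[13];

       cpuid(0, &ebx, &ecx, &edx);   // leaf 0: eax = max leaf, vendor in ebx/edx/ecx
       memcpy(vendor, &ebx, 4);
       memcpy(vendor + 4, &edx, 4);
       memcpy(vendor + 8, &ecx, 4);
       vendor[12] = 0;
       printf("%s\n", vendor);       // e.g. "GenuineIntel" or "AuthenticAMD"
   }
*/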

/* switch_to_foreign_stack(new_sp, func, arg_0, arg_1, arg_2, arg_3)  */
/*   Not fully general, but should get us off of the signal stack */
        __ifndef(`WINDOWS')
_exportfn(C(switch_to_foreign_stack))
        __(movq %rdi,%rsp)
        __(movq %rsi,%rax)
        __(movq %rdx,%rdi)
        __(movq %rcx,%rsi)
        __(movq %r8,%rdx)
        __(movq %r9,%rcx)
        __(jmp *%rax)
_endfn
        __endif
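/* In the System V x86-64 calling convention used on the non-Windows */
/* targets, the shuffle above installs new_sp (%rdi) as %rsp, stashes func */
/* (%rsi) in %rax, and slides arg_0..arg_3 (%rdx,%rcx,%r8,%r9) down into the */
/* first four argument registers (%rdi,%rsi,%rdx,%rcx) before tail-jumping */
/* to func on the new stack. */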

_exportfn(C(freebsd_sigreturn))
        __(movl $417,%eax)      /* SYS_sigreturn */
        __(syscall)
        __(ret)

_exportfn(C(get_vector_registers))
_endfn

_exportfn(C(put_vector_registers))
_endfn

        __ifdef(`DARWIN')
_exportfn(C(darwin_sigreturn))
        .globl C(sigreturn)
        __(movl $0x20000b8,%eax)
        __(syscall)
        __(ret)
_endfn
        __endif

        __ifdef(`WIN_64')
/* %rcx = CONTEXT, %rdx = tcr, %r8 = old_valence.  This pretty
   much has to be uninterruptible */
_exportfn(C(restore_windows_context))
Xrestore_windows_context_start:
        __(subq $0x38,%rsp)
        __(xorl %eax,%eax)
        __(movq %r8,tcr.valence(%rdx))
        __(movq %rax,tcr.pending_exception_context(%rdx))
        __(fxrstor win64_context.fpstate(%rcx))
        __(movapd win64_context.Xmm0(%rcx),%xmm0)
        __(movapd win64_context.Xmm1(%rcx),%xmm1)
        __(movapd win64_context.Xmm2(%rcx),%xmm2)
        __(movapd win64_context.Xmm3(%rcx),%xmm3)
        __(movapd win64_context.Xmm4(%rcx),%xmm4)
        __(movapd win64_context.Xmm5(%rcx),%xmm5)
        __(movapd win64_context.Xmm6(%rcx),%xmm6)
        __(movapd win64_context.Xmm7(%rcx),%xmm7)
        __(movapd win64_context.Xmm8(%rcx),%xmm8)
        __(movapd win64_context.Xmm9(%rcx),%xmm9)
        __(movapd win64_context.Xmm10(%rcx),%xmm10)
        __(movapd win64_context.Xmm11(%rcx),%xmm11)
        __(movapd win64_context.Xmm12(%rcx),%xmm12)
        __(movapd win64_context.Xmm13(%rcx),%xmm13)
        __(movapd win64_context.Xmm14(%rcx),%xmm14)
        __(movapd win64_context.Xmm15(%rcx),%xmm15)
        __(ldmxcsr win64_context.MxCsr(%rcx))
        __(movw win64_context.SegSs(%rcx),%ax)
        __(movw %ax,0x20(%rsp))
        __(movq win64_context.Rsp(%rcx),%rax)
        __(movq %rax,0x18(%rsp))
        __(movl win64_context.EFlags(%rcx),%eax)
        __(movl %eax,0x10(%rsp))
        __(movw win64_context.SegCs(%rcx),%ax)
        __(movw %ax,8(%rsp))
        __(movq win64_context.Rip(%rcx),%rax)
        __(movq %rax,(%rsp))
        __(movq win64_context.Rax(%rcx),%rax)
        __(movq win64_context.Rbx(%rcx),%rbx)
        __(movq win64_context.Rdx(%rcx),%rdx)
        __(movq win64_context.Rdi(%rcx),%rdi)
        __(movq win64_context.Rsi(%rcx),%rsi)
        __(movq win64_context.Rbp(%rcx),%rbp)
        __(movq win64_context.R8(%rcx),%r8)
        __(movq win64_context.R9(%rcx),%r9)
        __(movq win64_context.R10(%rcx),%r10)
        __(movq win64_context.R11(%rcx),%r11)
        __(movq win64_context.R12(%rcx),%r12)
        __(movq win64_context.R13(%rcx),%r13)
        __(movq win64_context.R14(%rcx),%r14)
        __(movq win64_context.R15(%rcx),%r15)
        /* This must be the last thing before the iret, e.g., if we're
        interrupted before the iret, the context we're returning to here
        is still in %rcx.  If we're interrupted -at- the iret, then
        everything but that which the iret will restore has been restored. */
        __(movq win64_context.Rcx(%rcx),%rcx)
Xrestore_windows_context_iret:
        __(iretq)
Xrestore_windows_context_end:
        __(nop)
_endfn
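/* The stores at 0x20/0x18/0x10/8/0(%rsp) above hand-build the five-slot */
/* frame that iretq consumes.  A C-style picture of that layout, bottom of */
/* stack first (the struct name is illustrative only):

   struct iretq_frame_sketch {
       unsigned long rip;       //  0(%rsp): target instruction pointer
       unsigned long cs;        //  8(%rsp): code segment selector
       unsigned long rflags;    // 0x10(%rsp): flags
       unsigned long rsp;       // 0x18(%rsp): target stack pointer
       unsigned long ss;        // 0x20(%rsp): stack segment selector
   };
*/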

_exportfn(C(windows_switch_to_foreign_stack))
        __(pop %rax)
        __(lea -0x20(%rcx),%rsp)
        __(push %rax)
        __(movq %r8,%rcx)
        __(jmp *%rdx)
_endfn

        .data
        .globl C(restore_windows_context_start)
        .globl C(restore_windows_context_end)
        .globl C(restore_windows_context_iret)
C(restore_windows_context_start):  .quad Xrestore_windows_context_start
C(restore_windows_context_end): .quad Xrestore_windows_context_end
C(restore_windows_context_iret): .quad Xrestore_windows_context_iret
        .text

/* Something that we shouldn't return to */
_exportfn(C(windows_halt))
        __(hlt)
_endfn
_exportfn(C(ensure_safe_for_string_operations))
        __(cld)
        __(ret)
_endfn
        __endif

/* zero N (%carg1) dnodes, starting at the dnode-aligned address in %carg0 */
_exportfn(C(zero_dnodes))
        __(pxor %xmm0,%xmm0)
        __(cmpq $0,%carg1)
        __(jmp 1f)
0:      __(movdqa %xmm0,(%carg0))
        __(lea 16(%carg0),%carg0)
        __(subq $1,%carg1)
1:      __(jne 0b)
        __(repret)
_endfn
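/* zero_dnodes clears 16-byte dnodes with movdqa, so %carg0 must be 16-byte */
/* aligned.  A hedged C sketch of the same loop (the SSE2 intrinsics and the */
/* function name are illustrative assumptions):

   #include <emmintrin.h>

   static void zero_dnodes_sketch(void *start, long ndnodes)
   {
       __m128i zero = _mm_setzero_si128();
       __m128i *p = (__m128i *)start;
       while (ndnodes-- > 0)
           _mm_store_si128(p++, zero);   // aligned 16-byte store, one dnode at a time
   }
*/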
        _endfile