source: trunk/ccl/lisp-kernel/x86-spentry64.s @ 3454

Last change on this file since 3454 was 3454, checked in by gb, 15 years ago

Every day, more stuff.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 53.8 KB
Line 
1/*
2   Copyright (C) 2005-2006 Clozure Associates and contributors
3   This file is part of OpenMCL. 
4
5   OpenMCL is licensed under the terms of the Lisp Lesser GNU Public
6   License , known as the LLGPL and distributed with OpenMCL as the
7   file "LICENSE".  The LLGPL consists of a preamble and the LGPL,
8   which is distributed with OpenMCL as the file "LGPL".  Where these
9   conflict, the preamble takes precedence. 
10
11   OpenMCL is referenced in the preamble as the "LIBRARY."
12
13   The LLGPL is also available online at
14   http://opensource.franz.com/preamble.html
15*/
16
17               
18        include(lisp.s)
19        _beginfile
20       
21        .align 2
/* Open the exported subprim entry point _SP$1, first closing any
   function that is still open (via the __func_name ifdef). */
define([_spentry],[ifdef([__func_name],[_endfn],[])
        _exportfn(_SP$1)
        .line  __line__
])
26
27             
/* Close the subprim _SP$1 opened by _spentry. */
define([_endsubp],[
        _endfn(_SP$1)
# __line__
])
32
/* Tail-call the lisp builtin at index $1 in the nilreg-relative
   builtin_functions vector, passing $2 arguments. */
define([jump_builtin],[
        ref_nrs_value(builtin_functions,%fname)
        set_nargs($2)
        vrefr(%fname,%fname,$1)
        jump_fname()
])
39
/* %arg_z has overflowed by one bit.  Make a bignum with 2 (32-bit) digits. */
_startfn(C(fix_one_bit_overflow))
        __(movq $two_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed([],aligned_bignum_size(2)))
        __(unbox_fixnum(%arg_z,%imm0))
        __(mov %temp0,%arg_z)   /* new uvector was consed into %temp0 */
        /* Flip the three bits lost to the fixnum tag's sign overflow,
           recovering the true 64-bit value. */
        __(xorq overflow_mask(%rip),%imm0)
        __(movq %imm0,misc_data_offset(%arg_z))
        __(jmp *%ra0)
overflow_mask:  .quad 0xe000000000000000
_endfn
51       
/* Make a lisp integer (fixnum or two-digit bignum) from the signed
   64-bit value in %imm0.  Box the value by shifting it left
   fixnumshift bits; if shifting back recovers the original value it
   fit in a fixnum, otherwise cons a two-digit bignum.
*/
_spentry(makes64)
        /* Tentatively box %imm0 as a fixnum; shifting back and
           comparing detects loss of the high bits. */
        __(movq %imm0,%imm1)
        __(shlq $fixnumshift,%imm1)
        __(movq %imm1,%arg_z)
        __(sarq $fixnumshift,%imm1)
        __(cmpq %imm1,%imm0)
        __(jz,pt 0f)
        /* BUG FIX: without this jump the overflow case fell straight
           through into the fixnum return at 0:, making the bignum
           path at 1: unreachable and returning a truncated fixnum. */
        __(jmp 1f)
0:      __(jmp *%ra0)
        /* Didn't fit: cons a two-digit bignum holding the raw value. */
1:      __(movd %imm0,%mm0)     /* save value across allocation */
        __(movq $two_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(2)))
        __(movq %mm0,misc_data_offset(%arg_z))
        __(jmp *%ra0)
_endsubp(makes64)       
71                               
72
73/* %imm1:%imm0 constitute a signed integer, almost certainly a bignum.
74   Make a lisp integer out of those 128 bits .. */
_startfn(C(makes128))
/* We're likely to have to make a bignum out of the integer in %imm1 and
   %imm0. We'll need to use %imm0 and %imm1 to cons the bignum, and
   will need to do some arithmetic (determining significant bigits)
   on %imm0 and %imm1 in order to know how large that bignum needs to be.
   Cache %imm0 and %imm1 in %mm0 and %mm1. */

        __(movd %imm0,%mm0)
        __(movd %imm1,%mm1)

/* If %imm1 is just a sign extension of %imm0, make a 64-bit signed integer. */

        __(sarq $63,%imm0)      /* %imm0 := sign word of the low half */
        __(cmpq %imm0,%imm1)
        __(movd %mm0,%imm0)     /* restore low half; movd leaves flags intact */
        __(je _SPmakes64)

/* Otherwise, if the high 32 bits of %imm1 are a sign-extension of the
   low 32 bits of %imm1, make a 3-digit bignum.  If the upper 32 bits
   of %imm1 are significant, make a 4 digit bignum */
        __(movq %imm1,%imm0)
        __(shlq $32,%imm0)
        __(sarq $32,%imm0)      /* sign-extend the low 32 bits of %imm1 */
        __(cmpq %imm0,%imm1)
        __(jz 3f)
        __(mov $four_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(4)))
        __(movq %mm0,misc_data_offset(%arg_z))
        __(movq %mm1,misc_data_offset+8(%arg_z))
        __(jmp *%ra0)
3:      __(mov $three_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(3)))
        __(movq %mm0,misc_data_offset(%arg_z))
        __(movd %mm1,misc_data_offset+8(%arg_z))        /* third digit: low 32 bits only */
        __(jmp *%ra0)
_endfn
111
112/* %imm1:%imm0 constitute an unsigned integer, almost certainly a bignum.
113   Make a lisp integer out of those 128 bits .. */
_startfn(C(makeu128))
/* We're likely to have to make a bignum out of the integer in %imm1 and
   %imm0. We'll need to use %imm0 and %imm1 to cons the bignum, and
   will need to do some arithmetic (determining significant bigits)
   on %imm0 and %imm1 in order to know how large that bignum needs to be.
   Cache %imm0 and %imm1 in %mm0 and %mm1. */

/* If the high word is 0, make an unsigned-byte 64 ... */
        __(testq %imm1,%imm1)
        __(jz _SPmakeu64)

        __(movd %imm0,%mm0)
        __(movd %imm1,%mm1)

        __(js 5f)               /* Sign bit set in %imm1. Need 5 digits */
        __(bsrq %imm1,%imm0)
        __(rcmpb(%imm0_b,$31))
        __(jae 4f)              /* Some high bits in %imm1.  Need 4 digits */
        __(testl %imm1_l,%imm1_l)
        __(movd %mm0,%imm0)
        __(jz _SPmakeu64)
        /* Need 3 digits */
        __(movq $three_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(3)))
        __(movq %mm0,misc_data_offset(%arg_z))
        __(movd %mm1,misc_data_offset+8(%arg_z))
        __(jmp *%ra0)
4:      __(movq $four_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(4)))
        __(jmp 6f)
5:      __(movq $five_digit_bignum_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,aligned_bignum_size(5)))
6:      __(movq %mm0,misc_data_offset(%arg_z))
        /* BUG FIX: the high 64 bits are cached in %mm1; the original
           stored %mm0 a second time here, duplicating the low word
           into digits 2-3 of 4- and 5-digit results. */
        __(movq %mm1,misc_data_offset+8(%arg_z))
        __(jmpq *%ra0)
_endfn
150
/* %arg_y = uvector, %arg_z = index (fixnum).  Type- and bounds-check
   the arguments, leave the subtag in %imm1_b, and fall through into
   misc_ref_common below. */
_spentry(misc_ref)
        __(movb $tagmask,%imm0_b)
        __(andb %arg_y_b,%imm0_b)
        __(cmpb $tag_misc,%imm0_b)
        __(je,pt 0f)
        __(uuo_error_reg_not_tag(Rarg_y,tag_misc))
0:      __(testb $fixnummask,%arg_z_b)
        __(je,pt 1f)
        __(uuo_error_reg_not_fixnum(Rarg_z))
1:      __(movq %arg_z,%imm0)
        /* Align the index with the header's element-count field and
           force the low (subtag) byte to all ones, so the unsigned
           compare below is strictly on the count. */
        __(shlq $num_subtag_bits-fixnumshift,%imm0)
        __(movb $-1,%imm0_b)
        __(cmpq misc_header_offset(%arg_y),%imm0)
        __(jb 2f)
        /* This is a vector bounds trap, which is hard to encode */
        .byte 0xcd,0xc8,0xf7
2:      __(movb misc_subtag_offset(%arg_y),%imm1_b)
_endsubp(misc_ref)
169       
170/* %imm1.b = subtag, %arg_y = uvector, %arg_z = index.
171   Bounds/type-checking done in caller */       
_startfn(C(misc_ref_common))
        /* Dispatch on the subtag's fulltag, which encodes the vector's
           element-width class. */
        __(extract_fulltag(%imm1,%imm0))
        __(cmpb $ivector_class_64_bit,%imm0_b)
        __(je local_label(misc_ref_64))
        __(cmpb $ivector_class_32_bit,%imm0_b)
        __(je local_label(misc_ref_32))
        __(cmpb $ivector_class_other_bit,%imm0_b)
        __(je local_label(misc_ref_other))
        /* Node vector.  Functions are funny: the first  N words
           are treated as (UNSIGNED-BYTE 64), where N is the low
           32 bits of the first word. */
        __(cmpb $subtag_function,%imm1_b)
        __(jne local_label(misc_ref_node))
        __(movl misc_data_offset(%arg_y),%imm0_l)
        __(shl $fixnumshift,%imm0)
        __(rcmpq(%arg_z,%imm0))
        __(jl local_label(misc_ref_u64))
local_label(misc_ref_node):
        __(movq misc_data_offset(%arg_y,%arg_z),%arg_z)
        __(jmp *%ra0)
local_label(misc_ref_u64):
        __(movq misc_data_offset(%arg_y,%arg_z),%imm0)
        __(jmp _SPmakeu64)
local_label(misc_ref_double_float_vector):
        __(movsd misc_data_offset(%arg_y,%arg_z),%fp1)
        __(movq $double_float_header,%imm0)
        __(Misc_Alloc_Fixed(%arg_z,double_float.size))
        __(movsd %fp1,double_float.value(%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_64):
        __(cmpb $subtag_double_float_vector,%imm1_b)
        __(je local_label(misc_ref_double_float_vector))
        /* BUG FIX: the subtag is in %imm1_b; the original compared
           %imm0_b (the fulltag class), so s64 vectors were misread
           as unsigned. */
        __(cmpb $subtag_s64_vector,%imm1_b)
        __(jne local_label(misc_ref_u64))
local_label(misc_ref_s64):     
        __(movq misc_data_offset(%arg_y,%arg_z),%imm0)
        __(jmp _SPmakes64)
local_label(misc_ref_u32):
        __(movl misc_data_offset(%arg_y,%imm0),%imm0_l)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_s32):
        __(movslq misc_data_offset(%arg_y,%imm0),%imm0)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_32):
        __(movq %arg_z,%imm0)
        __(shr $1,%imm0)        /* fixnum index -> byte offset (index*4) */
        __(cmpb $subtag_s32_vector,%imm1_b)
        __(je local_label(misc_ref_s32))
        __(cmpb $subtag_single_float_vector,%imm1_b)
        __(jne local_label(misc_ref_u32))
local_label(misc_ref_single_float_vector):
        /* BUG FIX: elements are 32 bits wide; the original movsd read
           64 bits and could read past the end of the vector's data. */
        __(movss misc_data_offset(%arg_y,%imm0),%fp1)
        __(movd %fp1,%imm0_l)
        __(shl $32,%imm0)       /* immediate single-float: value in high word */
        __(lea subtag_single_float(%imm0),%arg_z)
        __(jmp *%ra0)
local_label(misc_ref_other):
        __(cmpb $subtag_u16_vector,%imm1_b)
        __(jle local_label(misc_ref_16))
        __(cmpb $subtag_bit_vector,%imm1_b)
        __(jz local_label(misc_ref_bit_vector))
        /* 8-bit case:  string, u8, s8 */
        __(movq %arg_z,%imm0)
        __(shr $3,%imm0)        /* fixnum index -> byte offset (index*1) */
        __(cmpb $subtag_s8_vector,%imm1_b)
        __(je local_label(misc_ref_s8))
        __(jl local_label(misc_ref_string))
local_label(misc_ref_u8):
        __(movzbl misc_data_offset(%arg_y,%imm0),%imm0_l)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_s8):       
        __(movsbq misc_data_offset(%arg_y,%imm0),%imm0)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_string):
        __(movzbl misc_data_offset(%arg_y,%imm0),%imm0_l)
        __(shlq $charcode_shift,%imm0)
        __(leaq subtag_character(%imm0),%arg_z)
        __(jmp *%ra0)
local_label(misc_ref_16):
        __(movq %arg_z,%imm0)
        __(shrq $2,%imm0)       /* fixnum index -> byte offset (index*2) */
        __(cmpb $subtag_s16_vector,%imm1_b)
        __(je local_label(misc_ref_s16))
local_label(misc_ref_u16):     
        __(movzwl misc_data_offset(%arg_y,%imm0),%imm0_l)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_s16):     
        __(movswq misc_data_offset(%arg_y,%imm0),%imm0)
        __(box_fixnum(%imm0,%arg_z))
        __(jmp *%ra0)
local_label(misc_ref_bit_vector):
        __(unbox_fixnum(%arg_z,%imm0))
        __(movl $63,%imm1_l)
        __(andb %imm0_b,%imm1_b)        /* bit number within the 64-bit word */
        __(shrq $6,%imm0)               /* word index */
        __(btq %imm1,misc_data_offset(%arg_y,%imm0,8))
        __(setc %imm0_b)
        /* BUG FIX: setc yields 0/1, so anding with the boxed constant
           fixnum_one always produced 0; negate first so a set bit
           yields 0xff and the and leaves fixnum 1. */
        __(negb %imm0_b)
        __(andl $fixnum_one,%imm0_l)
        __(movq %imm0,%arg_z)
        __(jmp *%ra0)                   
_endfn(C(misc_ref_common))
278
279/* like misc_ref, only the boxed subtag is in arg_x.
280*/                                     
_spentry(subtag_misc_ref)
        __(movb $tagmask,%imm0_b)
        __(andb %arg_y_b,%imm0_b)
        __(cmpb $tag_misc,%imm0_b)
        __(je,pt 0f)
        __(uuo_error_reg_not_tag(Rarg_y,tag_misc))
0:      __(testb $fixnummask,%arg_z_b)
        __(je,pt 1f)
        __(uuo_error_reg_not_fixnum(Rarg_z))
1:      __(movq %arg_z,%imm0)
        /* Align index with the header's count field; subb borrows
           only within the (zero) low byte, leaving it all ones, so
           the unsigned compare is strictly on the element count. */
        __(shlq $num_subtag_bits-fixnumshift,%imm0)
        __(subb $1,%imm0_b)
        __(cmpq misc_header_offset(%arg_y),%imm0)
        __(jb 2f)
        /* This is a vector bounds trap, which is hard to encode */
        .byte 0xcd,0xc8,0xf7
2:      __(unbox_fixnum(%arg_x,%imm1))
        __(jmp C(misc_ref_common))
_endsubp(subtag_misc_ref)
300
301/* ret1valn returns "1 multiple value" when a called function does not */
302/* return multiple values.  Its presence on the stack (as a return address) */
303/* identifies the stack frame to code which returns multiple values. */
304
_exportfn(C(ret1valn))
        __(leaveq)              /* discard the current frame */
        __(pop %ra0)            /* real return address */
        __(push %arg_z)         /* the single value */
        __(set_nargs(1))
        __(jmpq *%ra0)
_endfn
312       
313        .globl C(popj)
314C(popj):
315
/* Return %nargs stacked values.  If the caller's return address is
   ret1val_addr it wants all values copied down adjacent to its frame;
   otherwise return the last value (or NIL if none) in %arg_z. */
_spentry(nvalret)
        .globl C(nvalret)                       
C(nvalret):     
        __(ref_global(ret1val_addr,%temp1))
        __(cmpq lisp_frame.savera0(%rbp),%temp1)
        __(je 1f)
        /* Single value wanted: last value, or NIL if nargs is 0. */
        __(testw %nargs,%nargs)
        __(movzwl %nargs,%nargs_l)      /* movzx doesn't disturb flags */
        __(movl $nil_value,%arg_z_l)
        __(cmovneq -node_size(%rsp,%nargs_q),%arg_z)
        __(leaveq)
        __(popq %ra0)
        __(jmp *%ra0)
       
/* actually need to return values ; always need to copy */
1:      __(leaq 8(%rbp),%imm1)          /* dest: just above saved frame */
        __(movq (%imm1),%ra0)
        __(movq 0(%rbp),%rbp)
        __(leaq (%rsp,%nargs_q),%temp0) /* src: one past the first value */
        __(xorl %imm0_l,%imm0_l)        /* %imm0 = bytes copied so far */
        __(jmp 3f)
2:      __(movq -node_size(%temp0),%temp1)
        /* BUG FIX: the original decremented %temp1 (the value just
           fetched) instead of the source pointer %temp0, so the loop
           re-copied the same slot and corrupted the values. */
        __(subq $node_size,%temp0)
        __(addq $node_size,%imm0)
        __(movq %temp1,-node_size(%imm1))
        __(subq $node_size,%imm1)
3:      __(cmpw %imm0_w,%nargs)
        __(jne 2b)
        __(movq %imm1,%rsp)
        __(jmp *%ra0)   
_endsubp(nvalret)
347       
/* Tail-call the function named by the symbol in %fname. */
_spentry(jmpsym)
        __(jump_fname())
_endsubp(jmpsym)
351
/* Tail-call the (known) function object in %temp0. */
_spentry(jmpnfn)
        __(movq %temp0,%fn)
        __(jmp *%fn)
_endsubp(jmpnfn)
356
/* Funcall the object in %temp0: symbol or function. */
_spentry(funcall)
        __(do_funcall())
_endsubp(funcall)
360
/* Create a single-value catch frame (mvflag = 0); tag in %arg_z. */
_spentry(mkcatch1v)
        __(Make_Catch(0))
        __(jmp *%ra0)
_endsubp(mkcatch1v)
365
/* Create an unwind-protect frame: a multiple-value catch frame whose
   tag is $undefined (presumably the unbound marker, which the nthrow
   subprims treat as "run cleanup code" -- TODO confirm in lisp.s). */
_spentry(mkunwind)
        __(movq $undefined,%arg_z)
        __(Make_Catch(fixnumone))
        __(jmp *%ra0)
_endsubp(mkunwind)
371
/* Create a multiple-value catch frame (mvflag = 1); tag in %arg_z. */
_spentry(mkcatchmv)
        __(Make_Catch(fixnumone))
        __(jmp *%ra0)
_endsubp(mkcatchmv)
376
/* Throw %nargs stacked values to the catch tag found on the stack
   just above them.  Searches the catch-frame chain, counting frames
   (boxed, in %imm0), then hands off to the nthrow subprims. */
_spentry(throw)
        __(movq %rcontext:tcr.catch_top,%imm1)
        __(xorl %imm0_l,%imm0_l)
        __(movzwl %nargs,%nargs_l)
        __(movq (%rsp,%nargs_q),%temp0) /* temp0 = tag */
        __(jmp local_label(_throw_test))
local_label(_throw_loop):
        __(cmpq %temp0,catch_frame.catch_tag(%imm1))
        __(je local_label(_throw_found))
        __(movq catch_frame.link(%imm1),%imm1)
        __(addq $fixnum_one,%imm0)
local_label(_throw_test):
        __(testq %imm1,%imm1)
        __(jne local_label(_throw_loop))
        /* No catch frame with a matching tag: signal the error, then
           retry the throw (the handler may have established one). */
        __(uuo_error_reg_not_tag(Rtemp0,subtag_catch_frame))
        __(jmp _SPthrow)
local_label(_throw_found):     
        __(testb $fulltagmask,catch_frame.mvflag(%imm1))
        __(jne local_label(_throw_multiple))
        /* Single-value catch: collapse the values to one in %arg_z. */
        __(testw %nargs,%nargs)
        __(movl $nil_value,%arg_z_l)
        __(je local_label(_throw_one_value))
        __(movq -node_size(%rsp,%nargs_q),%arg_z)
        __(add %nargs_q,%rsp)
local_label(_throw_one_value):
        __(lea local_label(_threw_one_value)(%rip),%ra0)
        __(jmp _SPnthrow1value)
__(tra(local_label(_threw_one_value)))
        /* Unwound to the target frame: pop any remaining special
           bindings, then restore the saved context and return. */
        __(movq %rcontext:tcr.catch_top,%temp0)
        __(movq catch_frame.db_link(%temp0),%imm1)
        __(cmpq %imm0,%imm1)
        __(jz local_label(_threw_one_value_dont_unbind))
        __(push %ra0)
        __(lea local_label(_threw_one_value_back_from_unbind)(%rip),%ra0)
        __(jmp _SPunbind_to)
__(tra(local_label(_threw_one_value_back_from_unbind)))
        __(pop %ra0)
local_label(_threw_one_value_dont_unbind):
        __(movq catch_frame.rbp(%temp0),%rbp)
        __(movq catch_frame.rsp(%temp0),%rsp)
        __(movq catch_frame.foreign_sp(%temp0),%imm0)
        __(movq catch_frame.xframe(%temp0),%imm1)
        __(movq %imm0,%rcontext:tcr.foreign_sp)
        __(movq %imm1,%rcontext:tcr.xframe)
        __(movq catch_frame.link(%temp0),%imm1)
        __(movq catch_frame._save0(%temp0),%save0)
        __(movq catch_frame._save1(%temp0),%save1)
        __(movq catch_frame._save2(%temp0),%save2)
        __(movq catch_frame._save3(%temp0),%save3)
        __(movq %imm1,%rcontext:tcr.catch_top)
        __(movq catch_frame.pc(%temp0),%ra0)
        /* Pop the catch frame off the temp stack. */
        __(lea -(tsp_frame.fixed_overhead+fulltag_misc)(%temp0),%imm1)
        __(movq (%imm1),%tsp)
        __(movq %tsp,%next_tsp)
        __(jmp *%ra0)
local_label(_throw_multiple):
        __(lea local_label(_threw_multiple)(%rip),%ra0)
        __(jmp _SPnthrowvalues)
__(tra(local_label(_threw_multiple)))
        __(movq %rcontext:tcr.catch_top,%temp0)
        __(movq catch_frame.db_link(%temp0),%imm0)
        /* NOTE(review): the multiple-value return path ends here at
           this revision -- the context restoration that should follow
           (cf. the single-value path above) is not yet written. */

_endsubp(throw)
442
443/* This takes N multiple values atop the vstack. */
/* Unwind %imm0 (boxed) catch/unwind frames, carrying %nargs stacked
   values; run unwind-protect cleanups as they are encountered. */
_spentry(nthrowvalues)
        __(movb $1,%rcontext:tcr.unwinding)
        __(movzwl %nargs,%nargs_l)
local_label(_nthrowv_nextframe):
        __(subq $fixnumone,%imm0)
        __(js local_label(_nthrowv_done))
        __(movd %imm0,%mm1)     /* cache remaining frame count in %mm1 */
        __(movq %rcontext:tcr.catch_top,%temp0)
        __(movq catch_frame.link(%temp0),%imm1)
        __(movq catch_frame.db_link(%temp0),%imm0)
        __(movq %imm1,%rcontext:tcr.catch_top)
        __(cmpq %imm0,%rcontext:tcr.db_link)
        __(jz local_label(_nthrowv_dont_unbind))
        __(push %ra0)
        __(leaq local_label(_nthrowv_back_from_unbind)(%rip),%ra0)
        __(jmp _SPunbind_to)
__(tra(local_label(_nthrowv_back_from_unbind)))

        __(pop %ra0)
local_label(_nthrowv_dont_unbind):
        __(cmpb $unbound_marker,catch_frame.catch_tag(%temp0))
        __(je local_label(_nthrowv_do_unwind))
/* A catch frame.  If the last one, restore context from there. */
        __(movd %mm1,%imm0)
        __(testq %imm0,%imm0)   /* last catch frame ? */
        __(jz local_label(_nthrowv_skip))
        __(movq catch_frame.xframe(%temp0),%save0)
        __(movq %save0,%rcontext:tcr.xframe)
        __(leaq (%rsp,%nargs_q),%save1)
        __(movq catch_frame.rsp(%temp0),%save2)
        __(movq %nargs_q,%save0)
        __(jmp local_label(_nthrowv_push_test))
local_label(_nthrowv_push_loop):
        /* Copy the values down to the target frame's %rsp. */
        __(subq $node_size,%save1)
        __(subq $node_size,%save2)
        __(movq (%save1),%temp1)
        __(movq %temp1,(%save2))
local_label(_nthrowv_push_test):
        __(subq $node_size,%save0)
        __(jns local_label(_nthrowv_push_loop))
        __(movq %save2,%rsp)
        __(movq catch_frame.rbp(%temp0),%rbp)
        __(movq catch_frame._save3(%temp0),%save3)
        __(movq catch_frame._save2(%temp0),%save2)
        __(movq catch_frame._save1(%temp0),%save1)
        __(movq catch_frame._save0(%temp0),%save0)
        __(movq catch_frame.foreign_sp(%temp0),%imm1)
        __(movq %imm1,%rcontext:tcr.foreign_sp)
local_label(_nthrowv_skip):     
        /* Pop the catch frame off the temp stack. */
        __(lea -(tsp_frame.fixed_overhead+fulltag_misc)(%temp0),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        __(movd %mm1,%imm0)
        __(jmp local_label(_nthrowv_nextframe))
local_label(_nthrowv_do_unwind):       
/* This is harder.  Call the cleanup code with the multiple values and
    nargs, the throw count, and the caller's return address in a temp
    stack frame. */
        __(movq catch_frame.xframe(%temp0),%save0)
        __(movq %save0,%rcontext:tcr.xframe)
        __(leaq (%rsp,%nargs_q),%save1)
        __(push catch_frame._save0(%temp0))
        __(push catch_frame._save1(%temp0))
        __(push catch_frame._save2(%temp0))
        __(push catch_frame._save3(%temp0))
        __(push catch_frame.pc(%temp0))
        __(movq catch_frame.rbp(%temp0),%rbp)
        __(movq catch_frame.rsp(%temp0),%temp1)
        __(movq catch_frame.foreign_sp(%temp0),%imm0)
        __(movq %imm0,%rcontext:tcr.foreign_sp)
        /* Discard the catch frame, so we can build a temp frame */
        __(lea -(tsp_frame.fixed_overhead+fulltag_misc)(%temp0),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        /* tsp overhead, nargs, throw count, ra0 */
        __(dnode_align(%nargs_q,(tsp_frame.fixed_overhead+(3*node_size)),%imm0))
        __(TSP_Alloc_Var(%imm0,%imm1))
        __(movq %nargs_q,%temp0)
        __(movq %nargs_q,(%imm1))
        __(movq %ra0,node_size(%imm1))
        __(movq %mm1,node_size*2(%imm1))
        __(leaq node_size*3(%imm1),%imm1)
        __(jmp local_label(_nthrowv_tpushtest))
local_label(_nthrowv_tpushloop):
        /* Save the values themselves after the header words. */
        __(movq -node_size(%save0),%temp0)
        __(subq $node_size,%save0)
        __(movq %temp0,(%imm1))
        __(addq $node_size,%imm1)
local_label(_nthrowv_tpushtest):
        __(subw $node_size,%nargs)
        __(jns local_label(_nthrowv_tpushloop))
        __(pop %xfn)
        __(pop %save3)
        __(pop %save2)
        __(pop %save1)
        __(pop %save0)
        __(movq %temp1,%rsp)
/* Ready to call cleanup code: set up tra, jmp to %xfn */
        __(leaq local_label(_nthrowv_called_cleanup)(%rip),%ra0)
        __(movb $0,%rcontext:tcr.unwinding)
        __(jmp *%xfn)
__(tra(local_label(_nthrowv_called_cleanup)))

        __(movb $1,%rcontext:tcr.unwinding)
        __(movd %tsp,%imm1)
        __(movq tsp_frame.data_offset+(0*node_size)(%imm1),%nargs_q)
        __(movq tsp_frame.data_offset+(1*node_size)(%imm1),%ra0)
        __(movq tsp_frame.data_offset+(2*node_size)(%imm1),%mm1)
        __(movq %nargs_q,%imm0)
        /* BUG FIX: the saved values start at data_offset+3 words from
           the frame base, matching the reads above; the original leaq
           omitted tsp_frame.data_offset and repushed header garbage. */
        __(leaq tsp_frame.data_offset+(node_size*3)(%imm1),%imm1)
        __(jmp local_label(_nthrowv_tpoptest))
local_label(_nthrowv_tpoploop):
        __(push (%imm1))
        __(addq $node_size,%imm1)
local_label(_nthrowv_tpoptest):
        __(subq $node_size,%imm0)
        __(jns local_label(_nthrowv_tpoploop))
        __(movd %tsp,%imm1)
        __(movq (%imm1),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        /* BUG FIX: the loop at _nthrowv_nextframe keeps the remaining
           frame count in %imm0; the original restored it to %temp0. */
        __(movd %mm1,%imm0)
        __(jmp local_label(_nthrowv_nextframe))
local_label(_nthrowv_done):
        __(movb $0,%rcontext:tcr.unwinding)
        __(movq %rcontext:tcr.tlb_pointer,%imm0)
        /* BUG FIX: the TLB pointer was just loaded into %imm0; the
           original indexed stale %imm1. */
        __(cmpq $0,INTERRUPT_LEVEL_BINDING_INDEX(%imm0))
        __(js local_label(_nthrowv_return))
        __(cmpq $0,%rcontext:tcr.interrupt_pending)
        __(je local_label(_nthrowv_return))
        __(interrupt_now())
local_label(_nthrowv_return):   
        __(jmp *%ra0)   
_endsubp(nthrowvalues)
578
579/* This is a (slight) optimization.  When running an unwind-protect,
580   save the single value and the throw count in the tstack frame.
581   Note that this takes a single value in arg_z. */
_spentry(nthrow1value)
        __(movb $1,%rcontext:tcr.unwinding)
        __(movzwl %nargs,%nargs_l)
local_label(_nthrow1v_nextframe):
        __(subq $fixnumone,%imm0)
        __(js local_label(_nthrow1v_done))
        __(movd %imm0,%mm1)     /* cache remaining frame count in %mm1 */
        __(movq %rcontext:tcr.catch_top,%temp0)
        __(movq catch_frame.link(%temp0),%imm1)
        __(movq catch_frame.db_link(%temp0),%imm0)
        __(movq %imm1,%rcontext:tcr.catch_top)
        __(cmpq %imm0,%rcontext:tcr.db_link)
        __(jz local_label(_nthrow1v_dont_unbind))
        __(push %ra0)
        __(leaq local_label(_nthrow1v_back_from_unbind)(%rip),%ra0)
        __(jmp _SPunbind_to)
__(tra(local_label(_nthrow1v_back_from_unbind)))

        __(pop %ra0)
local_label(_nthrow1v_dont_unbind):
        __(cmpb $unbound_marker,catch_frame.catch_tag(%temp0))
        __(je local_label(_nthrow1v_do_unwind))
/* A catch frame.  If the last one, restore context from there. */
        __(movd %mm1,%imm0)
        __(testq %imm0,%imm0)   /* last catch frame ? */
        __(jz local_label(_nthrow1v_skip))
        __(movq catch_frame.xframe(%temp0),%save0)
        __(movq %save0,%rcontext:tcr.xframe)
        __(leaq (%rsp,%nargs_q),%save1)
        __(movq catch_frame.rsp(%temp0),%save2)
        __(movq %nargs_q,%save0)
        __(jmp local_label(_nthrow1v_push_test))
local_label(_nthrow1v_push_loop):
        __(subq $node_size,%save1)
        __(subq $node_size,%save2)
        __(movq (%save1),%temp1)
        __(movq %temp1,(%save2))
local_label(_nthrow1v_push_test):
        __(subq $node_size,%save0)
        __(jns local_label(_nthrow1v_push_loop))
        __(movq %save2,%rsp)
        __(movq catch_frame.rbp(%temp0),%rbp)
        __(movq catch_frame._save3(%temp0),%save3)
        __(movq catch_frame._save2(%temp0),%save2)
        __(movq catch_frame._save1(%temp0),%save1)
        __(movq catch_frame._save0(%temp0),%save0)
        __(movq catch_frame.foreign_sp(%temp0),%imm1)
        __(movq %imm1,%rcontext:tcr.foreign_sp)
local_label(_nthrow1v_skip):   
        /* Pop the catch frame off the temp stack. */
        __(lea -(tsp_frame.fixed_overhead+fulltag_misc)(%temp0),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        __(movd %mm1,%imm0)
        __(jmp local_label(_nthrow1v_nextframe))
local_label(_nthrow1v_do_unwind):
/* This is harder, but not as hard (not as much BLTing) as the
   multiple-value case. */
        __(movq catch_frame.xframe(%temp0),%save0)
        __(movq %save0,%rcontext:tcr.xframe)
        __(movq catch_frame._save0(%temp0),%save0)
        __(movq catch_frame._save1(%temp0),%save1)
        __(movq catch_frame._save2(%temp0),%save2)
        __(movq catch_frame._save3(%temp0),%save3)
        __(movq catch_frame.pc(%temp0),%xfn)
        __(movq catch_frame.rbp(%temp0),%rbp)
        __(movq catch_frame.rsp(%temp0),%rsp)
        __(movq catch_frame.foreign_sp(%temp0),%imm0)
        __(movq %imm0,%rcontext:tcr.foreign_sp)
        /* Discard the catch frame, so we can build a temp frame */
        __(lea -(tsp_frame.fixed_overhead+fulltag_misc)(%temp0),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        /* tsp overhead, throw count, ra0, arg_z */
        __(dnode_align(%nargs_q,(tsp_frame.fixed_overhead+(3*node_size)),%imm0)) /* NOTE(review): result unused */
        /* BUG FIX: three words (ra0, count, arg_z) are stored below;
           the original allocated only 2*node_size of frame data. */
        __(TSP_Alloc_Fixed((3*node_size),%imm1))
        __(addq $tsp_frame.fixed_overhead,%imm1)
        __(movq %ra0,(%imm1))
        __(movq %mm1,node_size*1(%imm1))
        __(movq %arg_z,node_size*2(%imm1))
/* Ready to call cleanup code: set up tra, jmp to %xfn */
        __(leaq local_label(_nthrow1v_called_cleanup)(%rip),%ra0)
        __(movb $0,%rcontext:tcr.unwinding)
        __(jmp *%xfn)
__(tra(local_label(_nthrow1v_called_cleanup)))

        __(movb $1,%rcontext:tcr.unwinding)
        __(movd %tsp,%imm1)
        __(movq tsp_frame.data_offset+(0*node_size)(%imm1),%ra0)
        __(movq tsp_frame.data_offset+(1*node_size)(%imm1),%mm1)
        /* BUG FIX: offset was written as (2+node_size); the third
           saved word (arg_z) lives at (2*node_size). */
        __(movq tsp_frame.data_offset+(2*node_size)(%imm1),%arg_z)
        __(movd %tsp,%imm1)
        __(movq (%imm1),%imm1)
        __(movd %imm1,%tsp)
        __(movd %imm1,%next_tsp)
        /* BUG FIX: the loop at _nthrow1v_nextframe keeps the remaining
           frame count in %imm0; the original restored it to %temp0. */
        __(movd %mm1,%imm0)
        __(jmp local_label(_nthrow1v_nextframe))
local_label(_nthrow1v_done):
        __(movb $0,%rcontext:tcr.unwinding)
        __(movq %rcontext:tcr.tlb_pointer,%imm0)
        /* BUG FIX: the TLB pointer was just loaded into %imm0; the
           original indexed stale %imm1. */
        __(cmpq $0,INTERRUPT_LEVEL_BINDING_INDEX(%imm0))
        __(js local_label(_nthrow1v_return))
        __(cmpq $0,%rcontext:tcr.interrupt_pending)
        __(je local_label(_nthrow1v_return))
        __(interrupt_now())
local_label(_nthrow1v_return): 
        __(jmp *%ra0)   
_endsubp(nthrow1value)
689
690/* This never affects the symbol's vcell */
691/* Non-null symbol in arg_y, new value in arg_z */       
692       
_spentry(bind)
        /* %temp0 = byte index of the symbol's thread-local binding slot */
        __(movq symbol.binding_index(%arg_y),%temp0)
        __(cmpq %rcontext:tcr.tlb_limit,%temp0)
        __(jb,pt 0f)
        __(tlb_too_small())
0:      __(testq %temp0,%temp0)
        __(jz 9f)               /* index 0: symbol was never special */
        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        /* Binding record: <old value, index, old db_link> on the stack */
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq %arg_z,(%temp1,%temp0))
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
9:     
        __(movq %arg_y,%arg_z)
        __(movq $XSYMNOBIND,%arg_y)
        __(set_nargs(2))
        __(jmp _SPksignalerr)   
_endsubp(bind)
713
714/* arg_z = symbol: bind it to its current value */       
_spentry(bind_self)
        __(movq symbol.binding_index(%arg_z),%temp0)
        __(cmpq %rcontext:tcr.tlb_limit,%temp0)
        __(jb,pt 0f)
        __(tlb_too_small())
0:      __(testq %temp0,%temp0)
        __(jz 9f)               /* index 0: symbol was never special */
        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        __(cmpb $no_thread_local_binding_marker,(%temp0,%temp1))
        __(jz 2f)
        /* Already has a thread-local value: push a binding record that
           re-binds the slot to the value it already holds. */
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
        /* No thread-local value yet: bind the slot to the global vcell
           contents. */
2:      __(movq symbol.vcell(%arg_z),%arg_y)
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq %arg_y,(%temp1,%temp0))
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
9:      __(movq $XSYMNOBIND,%arg_y)
        __(set_nargs(2))
        __(jmp _SPksignalerr)
_endsubp(bind_self)
741
_spentry(bind_nil)
        __(movq symbol.binding_index(%arg_z),%temp0)
        __(cmpq %rcontext:tcr.tlb_limit,%temp0)
        __(jb,pt 0f)
        __(tlb_too_small())
0:      __(testq %temp0,%temp0)
        __(jz 9f)               /* index 0: symbol was never special */
        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        /* Binding record: <old value, index, old db_link>; new value NIL */
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq $nil_value,(%temp0,%temp1))
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
9:      __(movq $XSYMNOBIND,%arg_y)
        __(set_nargs(2))
        __(jmp _SPksignalerr)
_endsubp(bind_nil)
760
/* Like bind_self, but signal an unbound-variable error if the symbol
   has no current value. */
_spentry(bind_self_boundp_check)
        __(movq symbol.binding_index(%arg_z),%temp0)
        __(cmpq %rcontext:tcr.tlb_limit,%temp0)
        __(jb,pt 0f)
        __(tlb_too_small())
0:      __(testq %temp0,%temp0)
        __(jz 9f)               /* index 0: symbol was never special */
        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        __(cmpb $no_thread_local_binding_marker,(%temp1,%temp0))
        __(je 2f)
        __(cmpb $unbound_marker,(%temp1,%temp0))
        __(je 8f)
        /* Re-bind the slot to the thread-local value it already holds. */
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
        /* No thread-local value: bind to the global vcell contents,
           checking boundness first. */
2:      __(movq symbol.vcell(%arg_z),%arg_y)
        __(cmpb $unbound_marker,%arg_y_b)
        __(jz 8f)
        __(push (%temp1,%temp0))
        __(push %temp0)
        __(push %rcontext:tcr.db_link)
        __(movq %arg_y,(%temp1,%temp0))
        __(movq %rsp,%rcontext:tcr.db_link)
        __(jmp *%ra0)
8:      __(uuo_error_reg_unbound(Rarg_z))
       
9:      __(movq $XSYMNOBIND,%arg_y)
        __(set_nargs(2))
        __(jmp _SPksignalerr)
_endsubp(bind_self_boundp_check)
793
/* Cons the %nargs pushed arguments into a NIL-terminated list,
   returned in %arg_z. */
_spentry(conslist)
        __(movl $nil_value,%arg_z_l)
        __(testw %nargs,%nargs)
        __(jmp 2f)
1:      __(pop %arg_y)
        __(Cons(%arg_y,%arg_z,%arg_z))
        __(subw $node_size,%nargs)
2:      __(jnz 1b)
        __(jmp *%ra0)           
_endsubp(conslist)
804
805/* do list*: last arg in arg_z, all others pushed, nargs set to #args pushed.*/
806/* Cons, one cons cell at a time.  Maybe optimize this later. */
        /* Same loop as conslist, but the initial tail is the last
           argument already in %arg_z rather than NIL. */
807_spentry(conslist_star)
808        __(testw %nargs,%nargs)
809        __(jmp 2f)
8101:      __(pop %arg_y)
811        __(Cons(%arg_y,%arg_z,%arg_z))
812        __(subw $node_size,%nargs)
8132:      __(jnz 1b)
814        __(jmp *%ra0)           
815_endsubp(conslist_star)
816
/* NOTE(review): the next three subprims are empty placeholders -
   no x86-64 implementation exists yet at this revision. */
817_spentry(stkconslist)
818_endsubp(stkconslist)
819
820_spentry(stkconslist_star)
821_endsubp(stkconslist_star)
822
823_spentry(mkstackv)
824_endsubp(mkstackv)
825
826       
827        .globl C(egc_write_barrier_start)
/* NOTE(review): all of the EGC write-barrier subprims between
   egc_write_barrier_start and egc_write_barrier_end are empty
   placeholders here; only the bracketing labels that pc_luser_xp
   needs are defined.  The store_node_conditional commentary below
   still talks about PPC condition codes and the LR register and
   predates this x86-64 port - rewrite it when the subprim is
   implemented. */
828C(egc_write_barrier_start):
829/*
830   The function pc_luser_xp() - which is used to ensure that suspended threads
831   are suspended in a GC-safe way - has to treat these subprims (which implement
832   the EGC write-barrier) specially.  Specifically, a store that might introduce
833   an intergenerational reference (a young pointer stored in an old object) has
834   to "memoize" that reference by setting a bit in the global "refbits" bitmap.
835   This has to happen atomically, and has to happen atomically wrt GC.
836
837   Note that updating a word in a bitmap is itself not atomic, unless we use
838   interlocked loads and stores.
839*/
840
841/*
842  For RPLACA and RPLACD, things are fairly simple: regardless of where we are
843  in the function, we can do the store (even if it's already been done) and
844  calculate whether or not we need to set the bit out-of-line.  (Actually
845  setting the bit needs to be done atomically, unless we're sure that other
846  threads are suspended.)
847  We can unconditionally set the suspended thread's RIP to its RA0.
848*/
849       
850_spentry(rplaca)
851        .globl C(egc_rplaca)
852C(egc_rplaca):         
853_endsubp(rplaca)
854
855_spentry(rplacd)
856        .globl C(egc_rplacd)
857C(egc_rplacd):         
858_endsubp(rplacd)
859
860/*
861  Storing into a gvector can be handled the same way as storing into a CONS.
862*/
863
864_spentry(gvset)
865        .globl C(egc_gvset)
866C(egc_gvset):
867_endsubp(gvset)
868
869/* This is a special case of storing into a gvector: if we need to memoize the store,
870   record the address of the hash-table vector in the refmap, as well.
871*/       
872
873_spentry(set_hash_key)
874        .globl C(egc_set_hash_key)
875C(egc_set_hash_key): 
876_endsubp(set_hash_key)
877
878/*
879  This is a little trickier: the first instruction clears the EQ bit in CR0; the only
880  way that it can get set is if the conditional store succeeds.  So:
881  a) if we're interrupted on the first instruction, or if we're interrupted on a subsequent
882     instruction but CR0[EQ] is clear, the condtional store hasn't succeeded yet.  We don't
883     have to adjust the PC in this case; when the thread's resumed, the conditional store
884     will be (re-)attempted and will eventually either succeed or fail.
885  b) if the CR0[EQ] bit is set (on some instruction other than the first), the handler can
886     decide if/how to handle memoization.  The handler should set the PC to the LR, and
887     set arg_z to T.
888*/
889
890_spentry(store_node_conditional)
891        .globl C(egc_store_node_conditional)
892C(egc_store_node_conditional): 
893       .globl C(egc_write_barrier_end)
894C(egc_write_barrier_end):
895_endsubp(store_node_conditional)
896                               
/* SETQ of a special symbol: %arg_y is the symbol, %arg_z the new
   value.  Constants may not be assigned; signal XCONST for them,
   otherwise fall into the ordinary special-set subprim. */
897_spentry(setqsym)
898        __(btq $sym_vbit_const,symbol.flags(%arg_y))
        /* Fixed: BT copies the tested bit into CF and leaves ZF
           undefined, so the original conditional branch on ZF tested
           stale flags.  Branch on CF clear - bit not set, i.e. the
           symbol is not a constant - to the normal special-set path. */
899        __(jae _SPspecset)
900        __(movq %arg_y,%arg_z)
901        __(movq $XCONST,%arg_y)
902        __(set_nargs(2))
903        __(jmp _SPksignalerr)
904_endsubp(setqsym)
905
906_spentry(progvsave)
        /* Unimplemented: trap into the debugger if ever reached. */
907        __(int $3)
908_endsubp(progvsave)
909
        /* Empty placeholder - no x86-64 implementation yet. */
910_spentry(stack_misc_alloc)
911_endsubp(stack_misc_alloc)
912
913/* subtype (boxed, of course) is pushed, followed by nargs bytes worth of
914   initial-contents.  Note that this can be used to cons any type of initialized
915   node-header'ed misc object (symbols, closures, ...) as well as vector-like
916   objects.
917   Note that we're guaranteed to win (or force GC, or run out of memory)
918   because nargs < 32K. */     
919_spentry(gvector)
920        __(movzwl %nargs,%nargs_l)
921        __(movq (%rsp,%nargs_q),%imm0)  /* boxed subtype */
922        __(sarq $fixnumshift,%imm0)
923        __(movq %nargs_q,%imm1)
924        __(shlq $num_subtag_bits-word_shift,%imm1)
        /* %imm0 now holds the object header: subtag plus element count. */
925        __(orq %imm1,%imm0)
926        __(dnode_align(%nargs_q,node_size,%imm1))
927        __(Misc_Alloc(%arg_z))
928        __(movq %nargs_q,%imm1)
        /* Pop the pushed initial contents into the new object, last
           element first.  The pop does not disturb the flags, so jge
           tests the sign of the decremented index in %imm1. */
929        __(jmp 2f)
9301:      __(movq %temp0,misc_data_offset(%arg_z,%imm1))
9312:      __(subq $node_size,%imm1)
932        __(pop %temp0)  /* Note the intentional fencepost:
933                           discard the subtype as well.*/
934        __(jge 1b)
935        __(jmp *%ra0)
936_endsubp(gvector)
937
938_spentry(mvpass)
        /* Unimplemented: trap into the debugger if ever reached. */
939        __(int $3)
940_endsubp(mvpass)
941
942_spentry(fitvals)
        /* Unimplemented: trap into the debugger if ever reached. */
943        __(int $3)
944_endsubp(fitvals)
945
946/* N pushed %nargs values. */
        /* Return the Nth (zero-based, fixnum N pushed just below the
           values) of the %nargs values on the stack, or NIL when N is
           out of range, then pop the values and N off the stack. */
947_spentry(nthvalue)
948        __(movzwl %nargs,%nargs_l)
949        __(leaq (%rsp,%nargs_q),%imm0)  /* %imm0 points at pushed N */
950        __(movq (%imm0),%imm1) 
951        __(movl $nil_value,%arg_z_l)
952        __(cmpq %imm1,%nargs_q)
953        __(jb 1f)       /* N >= number of values: answer NIL */
        /* First value sits just below N; the Nth is N nodes below that. */
954        __(neg %imm1)
955        __(movq -node_size(%imm0,%imm1),%arg_z)
9561:      __(leaq node_size(%imm0),%rsp)
957        __(jmp *%ra0)   
958_endsubp(nthvalue)
959
/* NOTE(review): empty placeholders - no x86-64 implementation yet. */
960_spentry(values)
961_endsubp(values)
962
963_spentry(default_optional_args)
964_endsubp(default_optional_args)
965
966_spentry(opt_supplied_p)
967_endsubp(opt_supplied_p)
968
/* Cons all %nargs arguments (including those still in registers)
   into a heap-allocated rest list and push it.  The test sets the
   flags for the first jg at label 2; the NIL move does not. */
969_spentry(heap_rest_arg)
970        __(push_argregs())
971        __(movzwl %nargs,%nargs_l)
972        __(movl %nargs_l,%imm1_l)
973        __(testl %imm1_l,%imm1_l)
974        __(movl $nil_value,%arg_z_l)
975        __(jmp 2f)
976        .p2align 4
9771:      __(pop %temp1)
978        __(Cons(%temp1,%arg_z,%arg_z))
979        __(subl $node_size,%imm1_l)
9802:      __(jg 1b)
981        __(push %arg_z)
982        __(jmp *%ra0)           
983_endsubp(heap_rest_arg)
984
985/* %imm0 contains the number of fixed args ; make an &rest arg out of the others */
        /* Like heap_rest_arg, but leaves the first %imm0 bytes worth of
           pushed arguments in place and conses only the remainder. */
986_spentry(req_heap_rest_arg)
987        __(push_argregs())
988        __(movzwl %nargs,%nargs_l)
989        __(movl %nargs_l,%imm1_l)
990        __(subl %imm0_l,%imm1_l)
991        __(movl $nil_value,%arg_z_l)
992        __(jmp 2f)
993        .p2align 4
9941:      __(pop %temp1)
995        __(Cons(%temp1,%arg_z,%arg_z))
996        __(subl $node_size,%imm1_l)
9972:      __(jg 1b)
998        __(push %arg_z)
999        __(jmp *%ra0)           
1000_endsubp(req_heap_rest_arg)
1001
1002/* %imm0 bytes of stuff has already been pushed ; make an &rest out of any others */
        /* Same loop as req_heap_rest_arg; the argument registers have
           already been pushed by the caller. */
1003_spentry(heap_cons_rest_arg)
1004        __(movzwl %nargs,%nargs_l)
1005        __(movl %nargs_l,%imm1_l)
1006        __(subl %imm0_l,%imm1_l)
1007        __(movl $nil_value,%arg_z_l)
1008        __(jmp 2f)
1009        .p2align 4
10101:      __(pop %temp1)
1011        __(Cons(%temp1,%arg_z,%arg_z))
1012        __(subl $node_size,%imm1_l)
10132:      __(jg 1b)
1014        __(push %arg_z)
1015        __(jmp *%ra0)           
1016_endsubp(heap_cons_rest_arg)
1017
/* NOTE(review): empty placeholders - no x86-64 implementation yet. */
1018_spentry(simple_keywords)
1019_endsubp(simple_keywords)
1020
1021_spentry(keyword_args)
1022_endsubp(keyword_args)
1023
1024_spentry(keyword_bind)
1025_endsubp(keyword_bind)
1026
1027_spentry(poweropen_ffcall)
1028_endsubp(poweropen_ffcall)
1029
1030_spentry(unused_0)
1031_endsubp(unused_0)
1032
/* Signal a lisp error: tail-call the lisp error-dispatch function;
   the error code and arguments are already in the usual places. */
1033_spentry(ksignalerr)
1034        __(movq $nrs.errdisp,%fname)
1035        __(jump_fname) 
1036_endsubp(ksignalerr)
1037
/* Stack-cons a rest arg from all arguments: zero the fixed-arg count
   and share the tail of stack_cons_rest_arg. */
1038_spentry(stack_rest_arg)
1039        __(xorl %imm0_l,%imm0_l)
1040        __(push_argregs())
1041        __(jmp _SPstack_cons_rest_arg)
1042_endsubp(stack_rest_arg)
1043
        /* As above, but the caller supplies the fixed-arg byte count
           in %imm0 (same convention as req_heap_rest_arg). */
1044_spentry(req_stack_rest_arg)
1045        __(push_argregs())
1046        __(jmp _SPstack_cons_rest_arg)
1047_endsubp(req_stack_rest_arg)
1048
/* Cons the pushed arguments beyond the first %imm0 bytes into a rest
   list allocated in a temp-stack frame and push the list.  The je
   below consumes the flags of the subtract, computing the number of
   rest-arg bytes. */
1049_spentry(stack_cons_rest_arg)
1050        __(movzwl %nargs,%nargs_l)
1051        __(movl %nargs_l,%imm1_l)
1052        __(subl %imm0_l,%imm1_l)
1053        __(movl $nil_value,%arg_z_l)
1054        __(je 2f)       /* empty list ; make an empty TSP frame */
1055        __(cmpq $(tstack_alloc_limit-dnode_size)/2,%imm1)
1056        __(ja 3f)       /* make empty frame, then heap-cons */
        /* Each element needs one two-node cons cell, so double the
           byte count before sizing the frame. */
1057        __(addq %imm1,%imm1)
1058        __(dnode_align(%imm1,tsp_frame.fixed_overhead,%imm0))
1059        __(TSP_Alloc_Var(%imm0,%temp0))
1060        __(addq $tsp_frame.fixed_overhead+fulltag_cons,%temp0)
10611:      __(pop %arg_x)
1062        __(_rplacd(%temp0,%arg_z))
1063        __(_rplaca(%temp0,%arg_x))
1064        __(movq %temp0,%arg_z)
1065        __(addq $cons.size,%temp0)
1066        __(subq $dnode_size,%imm1)
1067        __(jne 1b)
1068        __(push %arg_z)
1069        __(jmp *%ra0)
1070/* Length 0, make empty frame */       
10712:
1072        __(TSP_Alloc_Fixed(0,%temp0))
1073        __(push %arg_z)
1074        __(jmp *%ra0)
1075/* Too big to stack-cons, but make an empty frame before heap-consing */
10763:             
1077        __(TSP_Alloc_Fixed(0,%temp0))
1078        __(jmp _SPheap_cons_rest_arg)
1079_endsubp(stack_cons_rest_arg)
1080
/* NOTE(review): empty placeholder - no x86-64 implementation yet. */
1081_spentry(poweropen_callbackX)
1082_endsubp(poweropen_callbackX)
1083
1084/* Prepend all but the first three (2 words of code, inner fn) and last two */
1085/* (function name, lfbits) elements of %fn to the "arglist". */
        /* The inherited arguments stored in the closure vector are
           inserted ahead of the caller's arguments - in registers when
           they fit, otherwise on the stack - then control jumps to the
           inner function.  %imm0 holds the inherited-arg byte count
           throughout. */
1086_spentry(call_closure)
1087        __(subq $fulltag_function-fulltag_misc,%fn)
1088        __(header_length(%fn,%imm0))
1089        __(movzwl %nargs,%nargs_l)
1090        __(subq $5<<fixnumshift,%imm0)  /* imm0 = inherited arg count */
1091        __(cmpw $nargregs<<fixnumshift,%nargs)
1092        __(jna,pt local_label(no_insert))
1093       
1094        /* Some arguments have already been pushed.  Push imm0's worth */
1095        /* of NILs, copy those arguments that have already been vpushed from */
1096        /* the old TOS to the new, then insert all of the inherited args */
1097        /* and go to the function. */
1098        __(movq %imm0,%imm1)
1099local_label(push_nil_loop):     
1100        __(push $nil_value)
1101        __(sub $fixnumone,%imm1)
1102        __(jne local_label(push_nil_loop))
1103        /* Need to use arg regs as temporaries here.  */
1104        __(movq %rsp,%temp0)
1105        __(push %arg_z)
1106        __(push %arg_y)
1107        __(push %arg_x)
1108        __(lea (%rsp,%imm0),%arg_x)
1109        __(lea -nargregs<<fixnumshift(%nargs_q),%arg_y)
local_label is the copy count: caller-pushed args beyond the register args. */
1110local_label(copy_already_loop):
1111        __(movq (%arg_x),%arg_z)
1112        __(addq $fixnumone,%arg_x)
1113        __(movq %arg_z,(%temp0))
1114        __(addq $fixnumone,%temp0)
1115        __(subq $fixnumone,%arg_y)
1116        __(jne local_label(copy_already_loop))
1117
1118        __(movl $3<<fixnumshift,%imm1_l) /* skip code, new fn */
1119local_label(insert_loop):               
1120        __(movq misc_data_offset(%fn,%imm1),%arg_z)
1121        __(addq $node_size,%imm1)
1122        __(addw $fixnum_one,%nargs)
1123        __(subq $node_size,%arg_x)
1124        __(movq %arg_z,(%arg_x))
1125        __(subq $fixnum_one,%imm0)
1126        __(jne local_label(insert_loop))
1127
1128        /* Recover the argument registers, pushed earlier */
1129        __(pop %arg_x)
1130        __(pop %arg_y)
1131        __(pop %arg_z)
1132        __(jmp local_label(go))
1133
1134        /* Here if nothing was pushed by the caller.  If we're
1135           going to push anything, we have to reserve a stack
1136           frame first. (We'll need to push something if the
1137           sum of %nargs and %imm0 is greater than nargregs */
1138local_label(no_insert):
1139        __(lea (%nargs_q,%imm0),%imm1)
1140        __(cmpq $nargregs<<fixnumshift,%imm1)
1141        __(jna local_label(no_insert_no_frame))
1142        /* Reserve space for a stack frame */
1143        __(push $0)
1144        __(push $0)
1145local_label(no_insert_no_frame):       
1146        /* nargregs or fewer args were already vpushed. */
1147        /* if exactly nargregs, vpush remaining inherited vars. */
1148        __(cmpw $nargregs<<fixnumshift,%nargs)
1149        __(movl $3<<fixnumshift,%imm1_l) /* skip code, new fn */
1150        __(leaq 3<<fixnumshift(%imm0),%temp0)
1151        __(jnz local_label(set_regs))
1152local_label(vpush_remaining): 
1153        __(push misc_data_offset(%fn,%imm1))
1154        __(addq $node_size,%imm1)
1155        __(addw $fixnumone,%nargs)
1156        __(subq $node_size,%imm0)
1157        __(jnz local_label(vpush_remaining))
1158        __(jmp local_label(go))
1159local_label(set_regs):
1160        /* if nargs was > 1 (and we know that it was < 3), it must have */
1161        /* been 2.  Set arg_x, then vpush the remaining args. */
1162        __(cmpw $fixnumone,%nargs)
1163        __(jle local_label(set_y_z))
1164local_label(set_arg_x):
1165        __(subq $node_size,%temp0)
1166        __(movq misc_data_offset(%fn,%temp0),%arg_x)
1167        __(addw $fixnumone,%nargs)
1168        __(subq $fixnumone,%imm0)
1169        __(jne local_label(vpush_remaining))
1170        __(jmp local_label(go))
1171        /* Maybe set arg_y or arg_z, preceding args */
1172local_label(set_y_z):
1173        __(jne local_label(set_arg_z))
1174        /* Set arg_y, maybe arg_x, preceding args */
1175local_label(set_arg_y):
1176        __(subq $node_size,%temp0)
1177        __(movq misc_data_offset(%fn,%temp0),%arg_y)
1178        __(addw $fixnumone,%nargs)
1179        __(subq $fixnum_one,%imm0)
1180        __(jnz local_label(set_arg_x))
1181        __(jmp local_label(go))
1182local_label(set_arg_z):
1183        __(subq $node_size,%temp0)
1184        __(movq misc_data_offset(%fn,%temp0),%arg_z)
1185        __(addw $fixnumone,%nargs)
1186        __(subq $fixnum_one,%imm0)
1187        __(jne local_label(set_arg_y))
1188       
        /* Fetch the inner function from the third closure element and
           tail-call it. */
1189local_label(go):       
1190        __(movq misc_data_offset+(2*node_size)(%fn),%fn)
1191        __(jmp *%fn)               
1192       
1193_endsubp(call_closure)
1194
/* NOTE(review): the following subprims are empty placeholders or
   debugger traps - no x86-64 implementation yet at this revision. */
1195_spentry(getxlong)
1196_endsubp(getxlong)
1197
1198_spentry(spreadargz)
1199_endsubp(spreadargz)
1200
1201_spentry(tfuncallgen)
1202_endsubp(tfuncallgen)
1203
1204_spentry(tfuncallslide)
1205_endsubp(tfuncallslide)
1206
1207_spentry(tfuncallvsp)
1208_endsubp(tfuncallvsp)
1209
1210_spentry(tcallsymgen)
1211_endsubp(tcallsymgen)
1212
1213_spentry(tcallsymslide)
1214_endsubp(tcallsymslide)
1215
1216_spentry(tcallsymvsp)
1217_endsubp(tcallsymvsp)
1218
1219_spentry(tcallnfngen)
1220_endsubp(tcallnfngen)
1221
1222_spentry(tcallnfnslide)
1223_endsubp(tcallnfnslide)
1224
1225_spentry(tcallnfnvsp)
1226_endsubp(tcallnfnvsp)
1227
1228
1229_spentry(misc_set)
1230_endsubp(misc_set)
1231
1232_spentry(stkconsyz)
1233_endsubp(stkconsyz)
1234
1235_spentry(stkvcell0)
1236        __(int $3)
1237_endsubp(stkvcell0)
1238
1239_spentry(stkvcellvsp)
1240        __(int $3)
1241_endsubp(stkvcellvsp)
1242
1243/* Make a "raw" area on the foreign stack, stack-cons a macptr to point to it,
1244   and return the macptr.  Size (in bytes, boxed) is in arg_z on entry; macptr
1245   in arg_z on exit. */
1246_spentry(makestackblock)
1247        __(unbox_fixnum(%arg_z,%imm0))
1248        __(dnode_align(%imm0,tsp_frame.fixed_overhead+macptr.size,%imm0))
        /* Requests too large for the temp stack are heap-allocated as
           gcable pointers instead - see label 1 below. */
1249        __(cmpq $tstack_alloc_limit,%imm0)
1250        __(jae 1f)
1251        __(movd %foreign_sp,%arg_z)
1252        __(subq %imm0,%arg_z)
1253        __(movq %foreign_sp,(%arg_z))
1254        __(movd %arg_z,%foreign_sp)
1255        __(lea macptr.size+tsp_frame.fixed_overhead(%arg_z),%imm0)
1256        __(movq $macptr_header,tsp_frame.fixed_overhead(%arg_z))
        /* Fixed: the tag adjustment is an immediate constant; the
           original line lacked the dollar sign and would have
           assembled as a load from an absolute memory address. */
1257        __(addq $fulltag_misc+tsp_frame.fixed_overhead,%arg_z)
1258        __(movq %imm0,macptr.address(%arg_z))
        /* NOTE(review): assumes %fp0 holds 0.0 so these stores zero the
           domain and type fields - confirm the register convention. */
1259        __(movss %fp0,macptr.domain(%arg_z))
1260        __(movss %fp0,macptr.type(%arg_z))
1261        __(jmp *%ra0)
        /* Too big: push an empty foreign-stack frame, then call lisp to
           make a gcable pointer. */
12621:      __(movd %foreign_sp,%imm0)
1263        __(subq $dnode_size,%imm0)
1264        __(movq %foreign_sp,(%imm0))
1265        __(movd %imm0,%foreign_sp)
1266        __(set_nargs(1))
1267        __(movq $nrs.new_gcable_ptr,%fname)
1268        __(jump_fname())
1269_endsubp(makestackblock)
1270
/* Like makestackblock, but zero-fill the allocated area.  %imm1
   remembers the old foreign stack pointer so the fill loop knows
   where the block ends. */
1271_spentry(makestackblock0)
1272        __(unbox_fixnum(%arg_z,%imm0))
1273        __(dnode_align(%imm0,tsp_frame.fixed_overhead+macptr.size,%imm0))
1274        __(cmpq $tstack_alloc_limit,%imm0)
1275        __(jae 9f)
1276        __(movd %foreign_sp,%arg_z)
1277        __(movq %arg_z,%imm1)
1278        __(subq %imm0,%arg_z)
1279        __(movq %foreign_sp,(%arg_z))
1280        __(movd %arg_z,%foreign_sp)
1281        __(lea macptr.size+tsp_frame.fixed_overhead(%arg_z),%imm0)
1282        __(movq $macptr_header,tsp_frame.fixed_overhead(%arg_z))
        /* Fixed: the tag adjustment is an immediate constant; the
           original line lacked the dollar sign and would have
           assembled as a load from an absolute memory address. */
1283        __(addq $fulltag_misc+tsp_frame.fixed_overhead,%arg_z)
1284        __(movq %imm0,macptr.address(%arg_z))
        /* NOTE(review): assumes %fp0 holds 0.0, both here and in the
           fill loop below - confirm the register convention. */
1285        __(movss %fp0,macptr.domain(%arg_z))
1286        __(movss %fp0,macptr.type(%arg_z))
        /* Zero the data area one dnode at a time, from the macptr data
           up to the old foreign stack pointer in %imm1. */
1287        __(jmp 2f)
12881:      __(movapd %fp0,(%imm0))
1289        __(addq $dnode_size,%imm0)
12902:      __(cmpq %imm0,%imm1)
1291        __(jne 1b)             
1292        __(jmp *%ra0)
        /* Too big: push an empty frame, then heap-allocate a gcable
           pointer from lisp. */
12939:      __(movd %foreign_sp,%imm0)
1294        __(subq $dnode_size,%imm0)
1295        __(movq %foreign_sp,(%imm0))
1296        __(movd %imm0,%foreign_sp)
1297        __(set_nargs(1))
1298        __(movq $nrs.new_gcable_ptr,%fname)
1299        __(jump_fname())
1300_endsubp(makestackblock0)
1301
/* NOTE(review): empty placeholders - no x86-64 implementation yet. */
1302_spentry(makestacklist)
1303_endsubp(makestacklist)
1304
1305_spentry(stkgvector)
1306_endsubp(stkgvector)
1307
1308_spentry(misc_alloc)
1309_endsubp(misc_alloc)
1310
1311_spentry(poweropen_ffcallX)
1312_endsubp(poweropen_ffcallX)
1313
1314
1315_spentry(macro_bind)
1316_endsubp(macro_bind)
1317
1318_spentry(destructuring_bind)
1319_endsubp(destructuring_bind)
1320
1321_spentry(destructuring_bind_inner)
1322_endsubp(destructuring_bind_inner)
1323
1324_spentry(recover_values)
1325_endsubp(recover_values)
1326
1327_spentry(vpopargregs)
1328_endsubp(vpopargregs)
1329
1330/* If arg_z is an integer, return in imm0 something whose sign
1331   is the same as arg_z's.  If not an integer, error. */
1332_spentry(integer_sign)
1333        __(testb $tagmask,%arg_z_b)
        /* For a fixnum, the boxed value itself has the right sign. */
1334        __(movq %arg_z,%imm0)
1335        __(je 8f)
1336        __(extract_typecode(%arg_z,%imm0))
1337        __(cmpb $subtag_bignum,%imm0_b)
1338        __(jne 9f)
        /* Bignum: fetch the most significant 32-bit digit. */
1339        __(getvheader(%arg_z,%imm0))
1340        __(shr $num_subtag_bits,%imm0)
        /* NOTE(review): %imm0 now holds the digit count, but the load
           below indexes without a times-4 scale factor, and the setae
           that follows consumes flags left by the shift, not by any
           comparison - this sequence looks unfinished; verify before
           relying on the bignum path. */
1341        __(movl misc_data_offset-4(%arg_z,%imm0),%imm0_l)
1342        __(setae %imm0_b)
1343        __(andl $1,%imm0_l)
13448:      __(jmp *%ra0)
13459:      __(uuo_error_reg_not_type(Rarg_z,error_object_not_integer))
1346_endsubp(integer_sign)
1347
/* NOTE(review): empty placeholders - no x86-64 implementation yet. */
1348_spentry(subtag_misc_set)
1349_endsubp(subtag_misc_set)
1350
1351_spentry(spread_lexprz)
1352_endsubp(spread_lexprz)
1353
1354
1355_spentry(reset)
1356_endsubp(reset)
1357
1358_spentry(mvslide)
1359_endsubp(mvslide)
1360
1361_spentry(save_values)
1362_endsubp(save_values)
1363
1364_spentry(add_values)
1365_endsubp(add_values)
1366
1367_spentry(poweropen_callback)
1368_endsubp(poweropen_callback)
1369
1370_spentry(misc_alloc_init)
1371_endsubp(misc_alloc_init)
1372
1373_spentry(stack_misc_alloc_init)
1374_endsubp(stack_misc_alloc_init)
1375
1376
1377_spentry(unused_1)
1378_endsubp(unused_1)
1379
/* NOTE(review): empty placeholders - no x86-64 implementation yet. */
1380_spentry(callbuiltin)
1381_endsubp(callbuiltin)
1382
1383_spentry(callbuiltin0)
1384_endsubp(callbuiltin0)
1385
1386_spentry(callbuiltin1)
1387_endsubp(callbuiltin1)
1388
1389_spentry(callbuiltin2)
1390_endsubp(callbuiltin2)
1391
1392_spentry(callbuiltin3)
1393_endsubp(callbuiltin3)
1394
1395_spentry(popj)
1396_endsubp(popj)
1397
1398_spentry(restorefullcontext)
1399_endsubp(restorefullcontext)
1400
1401_spentry(savecontextvsp)
1402_endsubp(savecontextvsp)
1403
1404_spentry(savecontext0)
1405_endsubp(savecontext0)
1406
1407_spentry(restorecontext)
1408_endsubp(restorecontext)
1409
1410_spentry(lexpr_entry)
1411_endsubp(lexpr_entry)
1412
1413_spentry(poweropen_syscall)
1414_endsubp(poweropen_syscall)
1415
1416
1417_spentry(breakpoint)
1418_endsubp(breakpoint)
1419
1420_spentry(eabi_ff_call)
1421_endsubp(eabi_ff_call)
1422
1423_spentry(eabi_callback)
1424_endsubp(eabi_callback)
1425
1426_spentry(eabi_syscall)
1427_endsubp(eabi_syscall)
1428
1429_spentry(getu64)
1430_endsubp(getu64)
1431
1432_spentry(gets64)
1433_endsubp(gets64)
1434
1435_spentry(makeu64)
1436_endsubp(makeu64)
1437
1438_spentry(specref)
1439_endsubp(specref)
1440
1441_spentry(specset)
1442_endsubp(specset)
1443
1444_spentry(specrefcheck)
1445_endsubp(specrefcheck)
1446
1447_spentry(restoreintlevel)
1448_endsubp(restoreintlevel)
1449
1450_spentry(makes32)
1451_endsubp(makes32)
1452
1453_spentry(makeu32)
1454_endsubp(makeu32)
1455
1456_spentry(gets32)
1457_endsubp(gets32)
1458
1459_spentry(getu32)
1460_endsubp(getu32)
1461
1462_spentry(fix_overflow)
1463_endsubp(fix_overflow)
1464
1465_spentry(mvpasssym)
1466_endsubp(mvpasssym)
1467
1468_spentry(unused_2)
1469_endsubp(unused_2)
1470
1471_spentry(unused_3)
1472_endsubp(unused_3)
1473
1474_spentry(unused_4)
1475_endsubp(unused_4)
1476
1477_spentry(unused_5)
1478_endsubp(unused_5)
1479
1480_spentry(unused_6)
1481_endsubp(unused_6)
1482
/* Pop the most recent special binding: restore the saved value into
   the TLB slot named by the binding frame, then unlink the frame. */
1483_spentry(unbind)
1484        __(movq %rcontext:tcr.db_link,%imm1)
1485        __(movq %rcontext:tcr.tlb_pointer,%arg_x)
1486        __(movq binding.sym(%imm1),%temp1)
1487        __(movq binding.val(%imm1),%arg_y)
1488        __(movq binding.link(%imm1),%imm1)
1489        __(movq %arg_y,(%arg_x,%temp1))
1490        __(movq %imm1,%rcontext:tcr.db_link)
1491        __(jmp *%ra0)   
1492_endsubp(unbind)
1493
/* Pop the %imm0 most recent special bindings (raw count, assumed
   positive), restoring each saved value into its TLB slot. */
1494_spentry(unbind_n)
1495        __(movq %rcontext:tcr.db_link,%imm1)
1496        __(movq %rcontext:tcr.tlb_pointer,%arg_x)
14971:             
1498        __(movq binding.sym(%imm1),%temp1)
1499        __(movq binding.val(%imm1),%arg_y)
1500        __(movq binding.link(%imm1),%imm1)
1501        __(movq %arg_y,(%arg_x,%temp1))
1502        __(subq $1,%imm0)
1503        __(jne 1b)
1504        __(movq %imm1,%rcontext:tcr.db_link)
1505        __(jmp *%ra0)   
1506_endsubp(unbind_n)
1507
/* Pop special bindings until the db_link chain reaches the frame
   address in %imm0 (assumed to be on the current chain). */
1508_spentry(unbind_to)
1509        __(movq %rcontext:tcr.db_link,%imm1)
1510        __(movq %rcontext:tcr.tlb_pointer,%arg_x)
15111:             
1512        __(movq binding.sym(%imm1),%temp1)
1513        __(movq binding.val(%imm1),%arg_y)
1514        __(movq binding.link(%imm1),%imm1)
1515        __(movq %arg_y,(%arg_x,%temp1))
1516        __(cmpq %imm1,%imm0)
1517        __(jne 1b)
1518        __(movq %imm1,%rcontext:tcr.db_link)
1519        __(jmp *%ra0)   
1520_endsubp(unbind_to)
1521
1522
1523/* Bind CCL::*INTERRUPT-LEVEL* to 0.  If its value had been negative, check
1524   for pending interrupts after doing so. */
1525       
1526_spentry(bind_interrupt_level_0)
1527        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        /* Test the sign of the old level now; the pushes and moves
           below do not disturb the flags, so the js later still sees
           this comparison. */
1528        __(cmpq $0,INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1529        __(push INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1530        __(push $INTERRUPT_LEVEL_BINDING_INDEX)
1531        __(push %rcontext:tcr.db_link)
1532        __(movq $0,INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1533        __(movq %rsp,%rcontext:tcr.db_link)
1534        __(js,pn 1f)
15350:      __(jmp *%ra0)
1536        /* Interrupt level was negative; interrupt may be pending */
15371:      __(cmpq $0,%rcontext:tcr.interrupt_pending)
1538        __(movq $0,%rcontext:tcr.interrupt_pending)
1539        __(je 0b)
1540        __(interrupt_now())
1541        __(jmp *%ra0)
1542_endsubp(bind_interrupt_level_0)
1543       
1544
1545/* Bind CCL::*INTERRUPT-LEVEL* to the fixnum -1.  (This has the effect
1546   of disabling interrupts.) */
1547
1548_spentry(bind_interrupt_level_m1)
1549        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        /* Push a binding frame - old value, tlb index, old db_link -
           then store boxed -1 into the TLB slot. */
1550        __(push INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1551        __(push $INTERRUPT_LEVEL_BINDING_INDEX)
1552        __(push %rcontext:tcr.db_link)
1553        __(movq $-1<<fixnumshift,INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1554        __(movq %rsp,%rcontext:tcr.db_link)
1555        __(jmp *%ra0)
1556_endsubp(bind_interrupt_level_m1)
1557
1558/* Bind CCL::*INTERRUPT-LEVEL* to the value in arg_z.  If that value's 0,
1559   do what _SPbind_interrupt_level_0 does */
1560_spentry(bind_interrupt_level)
1561        __(testq %arg_z,%arg_z)
1562        __(movq %rcontext:tcr.tlb_pointer,%temp1)
        /* Zero gets the pending-interrupt check in the specialized
           subprim; any other value is just bound. */
1563        __(jz _SPbind_interrupt_level_0)
1564        __(push INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1565        __(push $INTERRUPT_LEVEL_BINDING_INDEX)
1566        __(push %rcontext:tcr.db_link)
1567        __(movq %arg_z,INTERRUPT_LEVEL_BINDING_INDEX(%temp1))
1568        __(movq %rsp,%rcontext:tcr.db_link)
1569        __(jmp *%ra0)
1570_endsubp(bind_interrupt_level)
1571
1572/* Unbind CCL::*INTERRUPT-LEVEL*.  If the value changes from negative to
1573   non-negative, check for pending interrupts.  */
1574       
1575_spentry(unbind_interrupt_level)
1576        __(movq %rcontext:tcr.db_link,%imm1)
1577        __(movq %rcontext:tcr.tlb_pointer,%arg_x)
1578        __(movq INTERRUPT_LEVEL_BINDING_INDEX(%arg_x),%imm0)
        /* Sign of the level being unbound; the moves below preserve
           these flags for the js. */
1579        __(testq %imm0,%imm0)
1580        __(movq binding.val(%imm1),%temp0)
1581        __(movq binding.link(%imm1),%imm1)
1582        __(movq %temp0,INTERRUPT_LEVEL_BINDING_INDEX(%arg_x))
1583        __(movq %imm1,%rcontext:tcr.db_link)
1584        __(js,pn 1f)
15850:      __(jmp *%ra0)
        /* Old level was negative; if the restored level is still
           negative, interrupts stay disabled - just return. */
15861:      __(testq %temp0,%temp0)
1587        __(js 0b)
1588        __(cmpq $0,%rcontext:tcr.interrupt_pending)
1589        __(movq $0,%rcontext:tcr.interrupt_pending)
1590        __(je 0b)
1591        __(interrupt_now())
1592        __(jmp *%ra0)   
1593_endsubp(unbind_interrupt_level)
1594
1595       
/* NOTE(review): empty placeholder - no x86-64 implementation yet. */
1596_spentry(progvrestore)
1597_endsubp(progvrestore)
1598       
1599
1600/* %arg_z <- %arg_y + %arg_z.  Do the fixnum case - including overflow -
1601  inline.  Call out otherwise. */
1602_spentry(builtin_plus)
        /* Both operands are fixnums iff the OR of their low tag bits
           is zero; anything else calls out to the lisp builtin. */
1603        __(movb %arg_z_b,%imm0_b)
1604        __(orb %arg_y_b,%imm0_b)
1605        __(testb $fixnummask,%imm0_b)
1606        __(jne 1f)
1607        __(addq %arg_y,%arg_z)
        /* A fixnum sum can overflow by at most one bit; the handler
           builds a two-digit bignum from %arg_z. */
1608        __(jo,pn C(fix_one_bit_overflow))
1609        __(jmp *%ra0)
16101:      __(jump_builtin(_builtin_plus,2))
1611_endsubp(builtin_plus)
1612       
1613
1614/* %arg_z <- %arg_y - %arg_z.  Do the fixnum case - including overflow -
1615  inline.  Call out otherwise. */
1616_spentry(builtin_minus)                 
        /* Both operands are fixnums iff the OR of their low tag bits
           is zero; anything else calls out to the lisp builtin. */
1617        __(movb %arg_z_b,%imm0_b)
1618        __(orb %arg_y_b,%imm0_b)
1619        __(testb $fixnummask,%imm0_b)
1620        __(jne 1f)
        /* Fixed: subtraction must compute first-arg minus second-arg,
           i.e. %arg_y - %arg_z, consistent with the other two-operand
           builtins here; the original computed %arg_z - %arg_y.
           Exchange first so the difference lands in %arg_z. */
        __(xchgq %arg_y,%arg_z)
1621        __(subq %arg_y,%arg_z)
1622        __(jo,pn C(fix_one_bit_overflow))
1623        __(jmp *%ra0)
16241:      __(jump_builtin(_builtin_minus,2))
1625_endsubp(builtin_minus)
1626
1627/* %arg_z <- %arg_z * %arg_y.  Do the fixnum case - including overflow -
1628  inline.  Call out otherwise. */
1629_spentry(builtin_times)
1630        __(movb %arg_z_b,%imm0_b)
1631        __(orb %arg_y_b,%imm0_b)
1632        __(testb $fixnummask,%imm0_b)
1633        __(jne 2f)
        /* Unbox only one factor: the product of an unboxed fixnum and
           a boxed fixnum is already boxed. */
1634        __(unbox_fixnum(%arg_z,%imm0))
1635        /* 128-bit fixnum result in %imm1:%imm0. Overflow set if %imm1
1636           is significant */
1637        __(imul %arg_y)
1638        __(jo 1f)
1639        __(mov %imm0,%arg_z)
1640        __(jmp *%ra0)
        /* Overflow: redo the multiply fully unboxed and build a
           bignum from the 128-bit product. */
16411:      __(unbox_fixnum(%arg_z,%imm0))
1642        __(unbox_fixnum(%arg_y,%imm1))
1643        __(imul %imm1)
1644        __(jmp C(makes128))
16452:      __(jump_builtin(_builtin_times,2))
1646_endsubp(builtin_times)
1647
/* Division always calls out to the lisp builtin. */
1648_spentry(builtin_div)
1649        __(jump_builtin(_builtin_div,2))
/* Fixed: close the subprim explicitly, as every other subprim in this
   file does, rather than relying on the next entry macro to end it. */
_endsubp(builtin_div)
1650
1651/* %arg_z <- (= %arg_y %arg_z). */
1652_spentry(builtin_eq)
        /* Fast path only when both operands are fixnums. */
1653        __(movb %arg_z_b,%imm0_b)
1654        __(orb %arg_y_b,%imm0_b)
1655        __(testb $fixnummask,%imm0_b)
1656        __(jne 1f)
1657        __(rcmpq(%arg_z,%arg_y))
1658        __(condition_to_boolean(e,%imm0,%arg_z))
1659        __(jmp *%ra0)
16601:      __(jump_builtin(_builtin_eq,2))
1661_endsubp(builtin_eq)
1662       
1663/* %arg_z <- (/= %arg_y %arg_z).        */
1664_spentry(builtin_ne)
        /* Fast path only when both operands are fixnums. */
1665        __(movb %arg_z_b,%imm0_b)
1666        __(orb %arg_y_b,%imm0_b)
1667        __(testb $fixnummask,%imm0_b)
1668        __(jne 1f)
1669        __(rcmpq(%arg_z,%arg_y))
1670        __(condition_to_boolean(ne,%imm0,%arg_z))
1671        __(jmp *%ra0)
16721:      __(jump_builtin(_builtin_ne,2))
1673_endsubp(builtin_ne)
1674       
1675/* %arg_z <- (> %arg_y %arg_z). */
1676_spentry(builtin_gt)
        /* Fast path only when both operands are fixnums. */
1677        __(movb %arg_z_b,%imm0_b)
1678        __(orb %arg_y_b,%imm0_b)
1679        __(testb $fixnummask,%imm0_b)
1680        __(jne 1f)
1681        __(rcmpq(%arg_y,%arg_z))
1682        __(condition_to_boolean(g,%imm0,%arg_z))
1683        __(jmp *%ra0)
16841:      __(jump_builtin(_builtin_gt,2))
1685_endsubp(builtin_gt)
1686
1687/* %arg_z <- (>= %arg_y %arg_z).        */
1688_spentry(builtin_ge)
        /* Fast path only when both operands are fixnums. */
1689        __(movb %arg_z_b,%imm0_b)
1690        __(orb %arg_y_b,%imm0_b)
1691        __(testb $fixnummask,%imm0_b)
1692        __(jne 1f)
1693        __(rcmpq(%arg_y,%arg_z))
1694        __(condition_to_boolean(ge,%imm0,%arg_z))
1695        __(jmp *%ra0)
16961:      __(jump_builtin(_builtin_ge,2))
1697_endsubp(builtin_ge)
1698       
1699/* %arg_z <- (< %arg_y %arg_z). */
1700_spentry(builtin_lt)
        /* Fast path only when both operands are fixnums. */
1701        __(movb %arg_z_b,%imm0_b)
1702        __(orb %arg_y_b,%imm0_b)
1703        __(testb $fixnummask,%imm0_b)
1704        __(jne 1f)
1705        __(rcmpq(%arg_y,%arg_z))
1706        __(condition_to_boolean(l,%imm0,%arg_z))
1707        __(jmp *%ra0)
17081:      __(jump_builtin(_builtin_lt,2))
1709_endsubp(builtin_lt)
1710
1711/* %arg_z <- (<= %arg_y %arg_z). */
1712_spentry(builtin_le)
        /* Fast path only when both operands are fixnums. */
1713        __(movb %arg_z_b,%imm0_b)
1714        __(orb %arg_y_b,%imm0_b)
1715        __(testb $fixnummask,%imm0_b)
1716        __(jne 1f)
1717        __(rcmpq(%arg_y,%arg_z))
1718        __(condition_to_boolean(le,%imm0,%arg_z))
1719        __(jmp *%ra0)
17201:      __(jump_builtin(_builtin_le,2))
1721_endsubp(builtin_le)
1722
/* %arg_z <- T when %arg_y and %arg_z are EQL, NIL when they clearly
   are not; only same-subtag misc objects call out to the builtin. */
1723_spentry(builtin_eql)
1724        __(cmpq %arg_y,%arg_z)
1725        __(je 1f)
1726        /* Not EQ.  Could only possibly be EQL if both are fulltag-misc
1727           and both have the same subtag */
        /* NOTE(review): this extracts the narrow lisptag of each
           object but compares it against a fulltag constant - verify
           these constant spaces line up, or whether a fulltag extract
           is intended here, as builtin_length compares lisptags
           against lisptag constants instead. */
1728        __(extract_lisptag(%arg_y,%imm0))
1729        __(extract_lisptag(%arg_z,%imm1))
1730        __(cmpb $fulltag_misc,%imm0_b)
1731        __(jne 2f)
1732        __(cmpb %imm0_b,%imm1_b)
1733        __(jne 2f)
1734        __(extract_subtag(%arg_y,%imm0_b))
1735        __(extract_subtag(%arg_z,%imm1_b))
1736        __(cmpb %imm0_b,%imm1_b)
1737        __(jne 2f)
1738        __(jump_builtin(_builtin_eql,2))
17391:      __(movl $t_value,%arg_z_l)
1740        __(jmp *%ra0)
17412:      __(movl $nil_value,%arg_z_l)
1742        __(jmp *%ra0)   
1743_endsubp(builtin_eql)
1744
/* Length of the sequence in %arg_z.  Simple vectors and vector
   headers take the fast paths; lists are measured with a two-pointer
   loop that detects dotted and circular structure.  Anything else
   calls out to the lisp builtin. */
1745_spentry(builtin_length)
1746        __(extract_lisptag(%arg_z,%imm0))
1747        __(cmpb $tag_list,%imm0_b)
1748        __(jz 2f)
1749        __(cmpb $tag_misc,%imm0_b)
1750        __(jnz 8f)
1751        __(extract_subtag(%arg_z,%imm0_b))
1752        __(rcmpb(%imm0_b,$min_vector_subtag))
1753        __(jb 8f)
1754        __(je 1f)
1755        /* (simple-array * (*)) */
1756        __(movq %arg_z,%arg_y)
1757        __(vector_length(%arg_y,%arg_z))
1758        __(jmp *%ra0)
17591:      /* vector header */
1760        __(movq vectorH.logsize(%arg_z),%arg_z)
1761        __(jmp *%ra0)
17622:      /* list.  Maybe null, maybe dotted or circular. */
1763        __(movq $-fixnumone,%temp2)
1764        __(movq %arg_z,%temp0)  /* fast pointer */
1765        __(movq %arg_z,%temp1)  /* slow pointer */
17663:      __(extract_lisptag(%temp0,%imm0))       
        /* Fixed: bump the count before the nil test, so the branch
           below consumes the flags of the compare rather than those
           of the add, which also sets the flags. */
1768        __(addq $fixnumone,%temp2)
1767        __(cmpb $fulltag_nil,%temp0_b)
1769        __(je 9f)
1770        __(cmpb $tag_list,%imm0_b)
        /* Fixed: bail out to the generic builtin when the fast
           pointer does NOT hold a list (dotted list); the original
           branch tested the opposite condition, the reverse of the
           matching slow-pointer check below. */
1771        __(jne 8f)
        /* Advance the slow pointer only on every other iteration;
           the moves between the test and the branch preserve flags. */
1772        __(extract_lisptag(%temp1,%imm1))
1773        __(testb $fixnumone,%temp2_b)
1774        __(_cdr(%temp0,%temp0))
1775        __(je 3b)
1776        __(cmpb $tag_list,%imm1_b)
1777        __(jne 8f)
1778        __(_cdr(%temp1,%temp1))
        /* Fast pointer met the slow pointer: circular list; let the
           lisp builtin deal with it. */
1779        __(cmpq %temp0,%temp1)
1780        __(jne 3b)
17818:     
1782        __(jump_builtin(_builtin_length,1))
17839:     
1784        __(movq %temp2,%arg_z)
1785        __(jmp *%ra0)           
1786_endsubp(builtin_length)
1787
1788       
_spentry(builtin_seqtype)
/* SEQTYPE: return T if %arg_z is a list, NIL if it is a vector
   (simple vector or vector header); anything else takes the
   out-of-line builtin, which can signal a type error. */
        __(extract_lisptag(%arg_z,%imm0))
        __(cmpb $tag_list,%imm0_b)
        __(jz 1f)
        __(cmpb $tag_misc,%imm0_b)
        /* If tag_misc, replace the tag with the uvector's subtag;
           the cmov and the jne below both use the flags from the
           cmpb above. */
        __(cmovew misc_subtag_offset(%arg_z),%imm0_w)
        __(jne 2f)
        /* Subtags below min_vector_subtag are not vectors. */
        __(rcmpb(%imm0_b,$min_vector_subtag))
        __(jb 2f)
        __(movl $nil_value,%arg_z_l)
        __(jmp *%ra0)
1:      __(movl $t_value,%arg_z_l)
        __(jmp *%ra0)
2:     
        __(jump_builtin(_builtin_seqtype,1))
_endsubp(builtin_seqtype)
1805
_spentry(builtin_assq)
/* (ASSQ %arg_y %arg_z): return the first pair of alist %arg_z whose
   CAR is EQ to %arg_y, else NIL.  NIL elements are skipped; non-list
   tails or elements signal a type error via uuo. */
        __(cmpb $fulltag_nil,%arg_z_b)
        __(jz 5f)
1:      __(movb $tagmask,%imm0_b)
        __(andb %arg_z_b,%imm0_b)
        __(cmpb $tag_list,%imm0_b)
        __(jz,pt 2f)
        __(uuo_error_reg_not_list(Rarg_z))
2:      __(_car(%arg_z,%arg_x))
        __(_cdr(%arg_z,%arg_z))
        /* Skip NIL elements of the alist. */
        __(cmpb $fulltag_nil,%arg_x_b)
        __(jz 4f)
        __(movb $tagmask,%imm0_b)
        __(andb %arg_x_b,%imm0_b)
        __(cmpb $tag_list,%imm0_b)
        __(jz,pt 3f)
        __(uuo_error_reg_not_list(Rarg_x))
3:      __(_car(%arg_x,%temp0))
        __(cmpq %temp0,%arg_y)
        __(jnz 4f)
        /* Found: return the matching pair. */
        __(movq %arg_x,%arg_z)
        __(jmp *%ra0)
        /* The jnz at 5 consumes the flags from the cmpb at 4 (loop
           case) or from the entry compare (empty-alist case, where
           ZF is known set and we fall through to return NIL). */
4:      __(cmpb $fulltag_nil,%arg_z_b)
5:      __(jnz 1b)
        __(jmp *%ra0)                   
_endsubp(builtin_assq) 
1832
_spentry(builtin_memq)
/* (MEMQ %arg_y %arg_z): return the first tail of list %arg_z whose
   CAR is EQ to %arg_y, else NIL.  Non-list tails signal an error. */
        __(cmpb $fulltag_nil,%arg_z_b)
        /* The unconditional jmp preserves the flags just set; the jnz
           at 3 then either enters the loop or returns NIL for (). */
        __(jmp 3f)
1:      __(movb $tagmask,%imm0_b)
        __(andb %arg_z_b,%imm0_b)
        __(cmpb $tag_list,%imm0_b)
        __(jz,pt 2f)
        __(uuo_error_reg_not_list(Rarg_z))
2:      __(_car(%arg_z,%arg_x))
        __(_cdr(%arg_z,%temp0))
        __(cmpq %arg_x,%arg_y)
        __(jz 4f)       /* found: current tail is still in %arg_z */
        __(cmpb $fulltag_nil,%temp0_b)
        __(movq %temp0,%arg_z)
3:      __(jnz 1b)
4:      __(jmp *%ra0)                           
_endsubp(builtin_memq)
1850
/* Largest bit index that the inline LOGBITP fast path can test with
   a single bt on a boxed fixnum; larger indices (and bignums) go to
   the out-of-line builtin. */
        __ifdef([X8664])
logbitp_max_bit = 61
        __else
logbitp_max_bit = 30
        __endif
1856       
_spentry(builtin_logbitp)
/* (LOGBITP %arg_y %arg_z): fast path when both arguments are fixnums
   and the bit index %arg_y is in [0, logbitp_max_bit]. */
        __(movb %arg_z_b,%imm0_b)
        __(orb %arg_y_b,%imm0_b)
        __(testb $fixnummask,%imm0_b)
        __(jnz 1f)
        /* Unsigned compare, so a negative index also takes the
           out-of-line path. */
        __(cmpq $logbitp_max_bit<<fixnumshift,%arg_y)
        __(ja 1f)
        __(unbox_fixnum(%arg_y,%imm0))
        /* %arg_z stays boxed, so bias the bit number by the tag shift. */
        __(addb $fixnumshift,%imm0_b)
        __(bt %imm0,%arg_z)
        __(condition_to_boolean(b,%imm0,%arg_z))
/*     
        __(setb %imm0_b)
        __(andb $t_offset,%imm0_b)
        __(lea nil_value(%imm0),%arg_z)
*/     
        __(jmp *%ra0)
1:      __(jump_builtin(_builtin_logbitp,2))
_endsubp(builtin_logbitp)
1876
_spentry(builtin_logior)
/* (LOGIOR %arg_y %arg_z): when both are fixnums, the inclusive-or of
   the boxed values is itself the correctly boxed result (the tag bits
   are zero).  Otherwise take the out-of-line builtin. */
        __(movq %arg_y,%imm0)
        __(orq %arg_z,%imm0)
        __(testb $fixnummask,%imm0_b)
        __(jne 1f)
        __(orq %arg_y,%arg_z)
        __(jmp *%ra0)
1:     
        __(jump_builtin(_builtin_logior,2))
               
_endsubp(builtin_logior)
1888
_spentry(builtin_logand)
/* (LOGAND %arg_y %arg_z): the AND of two boxed fixnums is the boxed
   result directly, so no unboxing is needed on the fast path. */
        __(movq %arg_z,%imm0)
        __(orq %arg_y,%imm0)
        __(testb $fixnummask,%imm0_b)
        __(jne 1f)
        __(andq %arg_y,%arg_z)
        __(jmp *%ra0)
1:             
        __(jump_builtin(_builtin_logand,2))
_endsubp(builtin_logand)
1899
_spentry(builtin_negate)
/* Negate %arg_z.  negq works directly on a boxed fixnum; the single
   overflow case (the most negative fixnum) tail-calls
   fix_one_bit_overflow to cons a bignum. */
        __(testb $fixnummask,%arg_z_b)
        __(jne 1f)
        __(negq %arg_z)
        __(jo,pn C(fix_one_bit_overflow))
        __(jmp *%ra0)
1:             
        __(jump_builtin(_builtin_negate,1))     
_endsubp(builtin_negate)
1909
_spentry(builtin_logxor)
/* (LOGXOR %arg_y %arg_z): when both are fixnums, xor directly on the
   boxed values (the zero tag bits xor back to zero). */
        __(movb %arg_y_b,%imm0_b)
        __(orb %arg_z_b,%imm0_b)
        __(testb $fixnummask,%imm0_b)
        __(jne 1f)
        __(xorq %arg_y,%arg_z)
        __(jmp *%ra0)
1:             
        /* Bug fix: the slow path previously jumped to _builtin_logand,
           so LOGXOR on non-fixnums computed LOGAND. */
        __(jump_builtin(_builtin_logxor,2))
_endsubp(builtin_logxor)
1920
_spentry(builtin_aref1)
/* NOTE(review): empty at this revision — the SPbuiltin_aref1 entry
   label has no code of its own and falls through to the next entry
   point.  Presumably a placeholder to be implemented; confirm before
   relying on it. */
_endsubp(builtin_aref1)
1923
_spentry(builtin_aset1)
/* NOTE(review): empty placeholder, like builtin_aref1 above — the
   label falls through to whatever follows.  Confirm intended
   implementation. */
_endsubp(builtin_aset1)
1926
1927/* We have to be a little careful here  %cl has to be used for
1928   the (unboxed) shift count in all variable-length shifts, and
1929   %temp2 = %rcx.  Zero all but the low 8 (or 6) bits of %rcx,
1930   so that the shift count doesn't confuse the GC.
1931*/
_spentry(builtin_ash)
/* (ASH %arg_y %arg_z): fast path when value and count are both
   fixnums.  The result is a fixnum or (via makes128/makeu128) a
   bignum built from a 128-bit intermediate in %imm1:%imm0.
   Left shifts of more than 64 bits go out of line. */
        __(movb %arg_y_b,%imm0_b)
        __(orb %arg_z_b,%imm0_b)
        __(testb $fixnummask,%imm0_b)
        __(jne 9f)
        __(unbox_fixnum(%arg_y,%imm1))
        __(unbox_fixnum(%arg_z,%imm0))
        /* Z flag set if zero ASH shift count */
        __(jnz 1f)
        __(movq %arg_y,%arg_z)  /* shift by 0 */
        __(jmp *%ra0)
1:      __(jns 3f)
        /* Negative count: arithmetic right shift. */
        __(rcmpq(%imm0,$-63))
        __(jg 2f)
        /* Shifting right 63 or more bits yields 0 or -1 per sign. */
        __(sar $63,%imm1)
        __(box_fixnum(%imm1,%arg_z))
        __(jmp *%ra0)
2:      /* Right-shift by small fixnum */
        __(negb %imm0_b)
        __(movzbl %imm0_b,%ecx)
        __(sar %cl,%imm1)
        /* Clear %rcx (= %temp2) so stale bits can't confuse the GC;
           see the comment above this entry. */
        __(xorl %ecx,%ecx)
        __(box_fixnum(%imm1,%arg_z))
        __(jmp *%ra0)
3:    /* Left shift by fixnum. We cant shift by more than 63 bits, though
        shifting by 64 is actually easy. */
        __(rcmpq(%imm0,$64))
        __(jg 9f)
        __(jne 4f)
        /* left-shift by 64-bits exactly: the value becomes the high
           half (%imm1), the low half (%imm0) is zero. */
        __(xorl %imm0_l,%imm0_l)
        __(jmp C(makes128))
4:      /* left-shift by 1..63 bits.  Safe to move shift count to %rcx/%cl */
        __(movzbl %imm0_b,%ecx)  /* zero-extending mov */
        __(movq %imm1,%imm0)
        __(xorq %imm1,%imm1)
        __(testq %imm0,%imm0)
        __(js 5f)
        /* Non-negative value: high half stays 0, result is unsigned. */
        __(shld %cl,%imm0,%imm1)
        __(shl %cl,%imm0)
        __(xorb %cl,%cl)
        __(jmp C(makeu128))
5:      /* Negative value: set the high half to -1 (sign-extension)
           before the double shift; result is signed. */
        __(subq $1,%imm1)
        __(shld %cl,%imm0,%imm1)
        __(shl %cl,%imm0)
        __(xorb %cl,%cl)
        __(jmp C(makes128))
9:     
        __(jump_builtin(_builtin_ash,2))
_endsubp(builtin_ash)
Note: See TracBrowser for help on using the repository browser.