Index: /trunk/ccl/lisp-kernel/asmutils.s
===================================================================
--- /trunk/ccl/lisp-kernel/asmutils.s	(revision 517)
+++ /trunk/ccl/lisp-kernel/asmutils.s	(revision 518)
@@ -23,5 +23,5 @@
    R5 bytes wide. */
 _exportfn(C(zero_cache_lines))
-	__(cmpwi cr0,r4,0)
+	__(cmpri(cr0,r4,0))
 	__(mtctr r4)
 	__(beqlr)
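
This hunk replaces the explicit 32-bit compare `cmpwi` with the kernel's `cmpri` macro, which compares a register against an immediate at the target's natural word width, so the same source can serve 32-bit and 64-bit builds. A minimal m4 sketch of such a macro, assuming it is defined conditionally on a `PPC64` symbol (a hypothetical reconstruction, not the kernel's actual definition):

	/* Hypothetical sketch: expand to a doubleword compare on 64-bit
	   targets and a word compare on 32-bit targets.  Using $@ lets
	   the macro accept both the two-operand form (cr0 implied, as in
	   cmpri(r3,0)) and the three-operand form with an explicit CR
	   field (as in cmpri(cr0,r4,0)). */
	ifdef(`PPC64',
	      `define(`cmpri',`cmpdi $@')',
	      `define(`cmpri',`cmpwi $@')')
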
@@ -232,73 +232,73 @@
 */
 _exportfn(C(put_altivec_registers))
-	__(cmpwi r3,0)
+	__(cmpri(r3,0))
 	__(li r4,0)
 	__(beqlr)
-	__(STVX(0,3,4))
-	__(la r4,16(r4))
-	__(STVX(1,3,4))
-	__(la r4,16(r4))
-	__(STVX(2,3,4))
-	__(la r4,16(r4))
-	__(STVX(3,3,4))
-	__(la r4,16(r4))
-	__(STVX(4,3,4))
-	__(la r4,16(r4))
-	__(STVX(5,3,4))
-	__(la r4,16(r4))
-	__(STVX(6,3,4))
-	__(la r4,16(r4))
-	__(STVX(7,3,4))
-	__(la r4,16(r4))
-	__(STVX(8,3,4))
-	__(la r4,16(r4))
-	__(STVX(9,3,4))
-	__(la r4,16(r4))
-	__(STVX(10,3,4))
-	__(la r4,16(r4))
-	__(STVX(11,3,4))
-	__(la r4,16(r4))
-	__(STVX(12,3,4))
-	__(la r4,16(r4))
-	__(STVX(13,3,4))
-	__(la r4,16(r4))
-	__(STVX(14,3,4))
-	__(la r4,16(r4))
-	__(STVX(15,3,4))
-	__(la r4,16(r4))
-	__(STVX(16,3,4))
-	__(la r4,16(r4))
-	__(STVX(17,3,4))
-	__(la r4,16(r4))
-	__(STVX(18,3,4))
-	__(la r4,16(r4))
-	__(STVX(19,3,4))
-	__(la r4,16(r4))
-	__(STVX(20,3,4))
-	__(la r4,16(r4))
-	__(STVX(21,3,4))
-	__(la r4,16(r4))
-	__(STVX(22,3,4))
-	__(la r4,16(r4))
-	__(STVX(23,3,4))
-	__(la r4,16(r4))
-	__(STVX(24,3,4))
-	__(la r4,16(r4))
-	__(STVX(25,3,4))
-	__(la r4,16(r4))
-	__(STVX(26,3,4))
-	__(la r4,16(r4))
-	__(STVX(27,3,4))
-	__(la r4,16(r4))
-	__(STVX(28,3,4))
-	__(la r4,16(r4))
-	__(STVX(29,3,4))
-	__(la r4,16(r4))
-	__(STVX(30,3,4))
-	__(la r4,16(r4))
-	__(STVX(31,3,4))
-	__(la r4,16(r4))
-	__(MFVSCR(0))
-	__(STVX(0,3,4))
+	__(stvx v0,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v1,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v2,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v3,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v4,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v5,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v6,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v7,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v8,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v9,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v10,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v11,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v12,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v13,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v14,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v15,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v16,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v17,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v18,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v19,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v20,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v21,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v22,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v23,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v24,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v25,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v26,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v27,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v28,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v29,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v30,r3,r4)
+	__(la r4,16(r4))
+	__(stvx v31,r3,r4)
+	__(la r4,16(r4))
+	__(mfvscr v0)
+	__(stvx v0,r3,r4)
 	__(mfspr r5,256)
 	__(stw r5,8(r4))
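
The rewritten `put_altivec_registers` drops the old `STVX(n,...)` wrapper macros in favor of native `stvx` mnemonics with symbolic `vN` register names, storing v0 through v31 at consecutive 16-byte offsets from r3 (0 through 31*16), followed by the VSCR image at offset 32*16. Since the store/increment pairs are fully unrolled, the same sequence could in principle be generated with m4's classic `forloop` helper; a sketch, assuming a `forloop(var,from,to,body)` macro is available (it is a well-known m4 example macro, not a built-in):

	/* Sketch only: emit "stvx vN,r3,r4 ; la r4,16(r4)" for N = 0..31. */
	forloop(`n',0,31,`
	__(stvx v`'n,r3,r4)
	__(la r4,16(r4))
	')

The unrolled form used in the patch assembles to the same instructions while keeping the source directly readable and greppable.
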
@@ -307,75 +307,75 @@
 
 _exportfn(C(get_altivec_registers))
-	__(cmpwi r3,0)
+	__(cmpri(r3,0))
 	__(li r4,32*16)
 	__(beqlr)
-	__(LVX(0,3,4))
-	__(MTVSCR(0))
+	__(lvx v0,r3,r4)
+	__(mtvscr v0)
 	__(lwz r5,8(r4))
 	__(mtspr 256,r5)
 	__(la r4,-16(r4))
-	__(LVX(31,3,4))
-	__(la r4,-16(r4))
-	__(LVX(30,3,4))
-	__(la r4,-16(r4))
-	__(LVX(29,3,4))
-	__(la r4,-16(r4))
-	__(LVX(28,3,4))
-	__(la r4,-16(r4))
-	__(LVX(27,3,4))
-	__(la r4,-16(r4))
-	__(LVX(26,3,4))
-	__(la r4,-16(r4))
-	__(LVX(25,3,4))
-	__(la r4,-16(r4))
-	__(LVX(24,3,4))
-	__(la r4,-16(r4))
-	__(LVX(23,3,4))
-	__(la r4,-16(r4))
-	__(LVX(22,3,4))
-	__(la r4,-16(r4))
-	__(LVX(21,3,4))
-	__(la r4,-16(r4))
-	__(LVX(20,3,4))
-	__(la r4,-16(r4))
-	__(LVX(19,3,4))
-	__(la r4,-16(r4))
-	__(LVX(18,3,4))
-	__(la r4,-16(r4))
-	__(LVX(17,3,4))
-	__(la r4,-16(r4))
-	__(LVX(16,3,4))
-	__(la r4,-16(r4))
-	__(LVX(15,3,4))
-	__(la r4,-16(r4))
-	__(LVX(14,3,4))
-	__(la r4,-16(r4))
-	__(LVX(13,3,4))
-	__(la r4,-16(r4))
-	__(LVX(12,3,4))
-	__(la r4,-16(r4))
-	__(LVX(11,3,4))
-	__(la r4,-16(r4))
-	__(LVX(10,3,4))
-	__(la r4,-16(r4))
-	__(LVX(9,3,4))
-	__(la r4,-16(r4))
-	__(LVX(8,3,4))
-	__(la r4,-16(r4))
-	__(LVX(7,3,4))
-	__(la r4,-16(r4))
-	__(LVX(6,3,4))
-	__(la r4,-16(r4))
-	__(LVX(5,3,4))
-	__(la r4,-16(r4))
-	__(LVX(4,3,4))
-	__(la r4,-16(r4))
-	__(LVX(3,3,4))
-	__(la r4,-16(r4))
-	__(LVX(2,3,4))
-	__(la r4,-16(r4))
-	__(LVX(1,3,4))
-	__(la r4,-16(r4))
-	__(LVX(0,3,4))
+	__(lvx v31,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v30,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v29,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v28,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v27,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v26,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v25,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v24,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v23,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v22,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v21,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v20,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v19,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v18,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v17,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v16,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v15,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v14,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v13,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v12,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v11,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v10,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v9,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v8,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v7,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v6,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v5,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v4,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v3,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v2,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v1,r3,r4)
+	__(la r4,-16(r4))
+	__(lvx v0,r3,r4)
 	__(blr)
 _endfn
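
`get_altivec_registers` walks the same save area in reverse: it restores the VSCR (and, via the `lwz`/`mtspr 256` pair, VRSAVE) from offset 32*16 first, then reloads v31 down to v0, decrementing r4 by 16 between loads. A mirror-image sketch under the same `forloop` assumption as above (the trailing adjustment of r4 after v0 is harmless, since r4 is a scratch register here):

	/* Sketch only: emit "lvx vN,r3,r4 ; la r4,-16(r4)" for N = 31..0. */
	forloop(`n',0,31,`
	__(lvx v`'eval(31-n),r3,r4)
	__(la r4,-16(r4))
	')
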
