Changeset 15366


Timestamp:
May 18, 2012, 9:20:56 PM
Author:
gb
Message:

Change the write-barrier implementation on x86:
managed_static_area->refbits tracks references to (potentially)
ephemeral objects and is traversed by the EGC; the new global
managed_static_refbits tracks references from the managed_static_area to
(potentially) dynamic objects and is traversed by the full GC.
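
Roughly, both bitvectors record one bit per dnode and are indexed from
lisp_global(REF_BASE); a memoized store may need to set a bit in either
or both of them.  A minimal C sketch of that check (the function name
note_stored_reference and the egc_refbits parameter are illustrative,
not part of this changeset; area_dnode, lisp_global and atomic_set_bit
are existing kernel primitives):

    /* Sketch only: memoize a store into the dnode containing `ea`.
       `egc_refbits` stands in for the per-area refbits the EGC consults. */
    static void
    note_stored_reference(LispObj *ea, bitvector egc_refbits)
    {
      natural dnode = area_dnode(ea, lisp_global(REF_BASE));

      if (dnode < lisp_global(OLDSPACE_DNODE_COUNT)) {
        /* reference to a (potentially) ephemeral object: scanned by the EGC */
        atomic_set_bit(egc_refbits, dnode);
      }
      if (dnode < lisp_global(MANAGED_STATIC_DNODES)) {
        /* reference from the managed static area to dynamic space:
           scanned by the full GC */
        atomic_set_bit(managed_static_refbits, dnode);
      }
    }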

To support this:

  • add a couple of new kernel globals (actually, conditionally redefine a few PPC-specific things) to keep track of the number of dnodes in the managed static area and of the new managed_static_refbits bitvector. (We might want to just add new globals and leave the PPC-specific things there; the managed_static_area is currently x86-specific, but we might want to use it on other architectures.)
  • change the image loading/saving code to restore/save managed_static_refbits
  • change purify/impurify on x86 to maintain both bitvectors
  • change the GC to traverse both bitvectors (in different situations)
  • initialize and maintain the new kernel globals
  • provide a way of reserving address space for reasons other than heap initialization
  • implement the new write barrier in the affected x86 subprimitives
  • maintain both bitvectors in pc_luser_xp
  • make other incidental changes

This seems to work reliably on X8664 Linux. Some things still need to be
tested on Win64, and the X8632 write-barrier code probably needs a lot of
testing (that architecture hardly has any registers ...). Other platforms
shouldn't be affected, but we'll see.

Location:
trunk/source/lisp-kernel
Files:
13 edited

  • trunk/source/lisp-kernel/area.h

    r15055 r15366  
    166166
    167167#define STATIC_RESERVE (2<<12)
    168 
     168#define MANAGED_STATIC_SIZE ((natural)(PURESPACE_RESERVE/2-PURESPACE_SIZE))
    169169
    170170#define SPJUMP_TARGET_ADDRESS (STATIC_BASE_ADDRESS+0x3000)
  • trunk/source/lisp-kernel/gc-common.c

    r15229 r15366  
    8989
    9090Boolean GCDebug = false, GCverbose = false;
    91 bitvector GCmarkbits = NULL, GCdynamic_markbits = NULL;
     91bitvector GCmarkbits = NULL, GCdynamic_markbits = NULL, managed_static_refbits = NULL;
    9292LispObj GCarealow = 0, GCareadynamiclow = 0;
    9393natural GCndnodes_in_area = 0, GCndynamic_dnodes_in_area = 0;
     
    10921092
    10931093void
    1094 forward_memoized_area(area *a, natural num_memo_dnodes)
    1095 {
    1096   bitvector refbits = a->refbits;
     1094forward_memoized_area(area *a, natural num_memo_dnodes, bitvector refbits)
     1095{
    10971096  LispObj *p = (LispObj *) a->low, x1, x2, new;
    10981097#ifdef ARM
     
    11031102  hash_table_vector_header *hashp = NULL;
    11041103  Boolean header_p;
     1104
     1105
    11051106
    11061107  if (num_memo_dnodes) {
     
    12991300mark_managed_static_refs(area *a, BytePtr low_dynamic_address, natural ndynamic_dnodes)
    13001301{
    1301   bitvector refbits = a->refbits;
     1302  bitvector refbits = managed_static_refbits;
    13021303  LispObj *p = (LispObj *) a->low, x1, x2;
    13031304  natural inbits, outbits, bits, bitidx, *bitsp, nextbit, diff, memo_dnode = 0,
     
    15481549      }
    15491550    }
    1550  
     1551
    15511552    if (GCephemeral_low) {
    15521553      mark_memoized_area(tenured_area, area_dnode(a->low,tenured_area->low));
    1553     }
    1554 
    1555     mark_managed_static_refs(managed_static_area,low_markable_address,area_dnode(a->active,low_markable_address));
    1556    
     1554      mark_memoized_area(managed_static_area,managed_static_area->ndnodes);
     1555    } else {
     1556      mark_managed_static_refs(managed_static_area,low_markable_address,area_dnode(a->active,low_markable_address));
     1557    }
    15571558    other_tcr = tcr;
    15581559    do {
     
    16861687
    16871688    if (GCephemeral_low) {
    1688       forward_memoized_area(tenured_area, area_dnode(a->low, tenured_area->low));
    1689     }
    1690  
    1691     forward_memoized_area(managed_static_area,area_dnode(managed_static_area->active,managed_static_area->low));
     1689      forward_memoized_area(tenured_area, area_dnode(a->low, tenured_area->low), tenured_area->refbits);
     1690      forward_memoized_area(managed_static_area,managed_static_area->ndnodes, managed_static_area->refbits);
     1691    } else {
     1692      forward_memoized_area(managed_static_area,area_dnode(managed_static_area->active,managed_static_area->low),managed_static_refbits);
     1693    }
    16921694    a->active = (BytePtr) ptr_from_lispobj(compact_dynamic_heap());
    16931695
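
In summary, the marking phase now chooses its bitvector by GC flavor, and
the forwarding phase makes the same choice through forward_memoized_area's
new refbits argument.  A sketch of the new control flow (a paraphrase of
the code above, not additional source; `a` and low_markable_address come
from the surrounding gc() context):

    /* Sketch, not the literal source. */
    if (GCephemeral_low) {
      /* EGC: walk the per-area refbits (references to possibly
         ephemeral objects). */
      mark_memoized_area(tenured_area, area_dnode(a->low, tenured_area->low));
      mark_memoized_area(managed_static_area, managed_static_area->ndnodes);
    } else {
      /* Full GC: walk managed_static_refbits (references from the
         managed static area to dynamic objects). */
      mark_managed_static_refs(managed_static_area, low_markable_address,
                               area_dnode(a->active, low_markable_address));
    }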
  • trunk/source/lisp-kernel/gc.h

    r15202 r15366  
    6464extern LispObj GCarealow, GCareadynamiclow;
    6565extern natural GCndnodes_in_area, GCndynamic_dnodes_in_area;
    66 extern bitvector GCmarkbits, GCdynamic_markbits;
     66extern bitvector GCmarkbits, GCdynamic_markbits,managed_static_refbits;
    6767LispObj *global_reloctab, *GCrelocptr;
    6868LispObj GCfirstunmarked;
     
    192192void update_locref(LispObj *);
    193193void forward_gcable_ptrs(void);
    194 void forward_memoized_area(area *, natural);
     194void forward_memoized_area(area *, natural, bitvector);
    195195void forward_tcr_tlb(TCR *);
    196196void reclaim_static_dnodes(void);
  • trunk/source/lisp-kernel/image.c

    r15093 r15366  
    272272        return;
    273273      }
     274      if (!CommitMemory(global_mark_ref_bits,refbits_size)) {
     275        return;
     276      }
    274277      /* Need to save/restore persistent refbits. */
    275       if (!MapFile(global_mark_ref_bits,
     278      if (!MapFile(managed_static_refbits,
    276279                   align_to_power_of_2(pos+mem_size,log2_page_size),
    277280                   refbits_size,
     
    631634
    632635      seek_to_next_page(fd);
    633       if (writebuf(fd,(char*)a->refbits,nrefbytes)) {
     636      if (writebuf(fd,(char*)managed_static_refbits,nrefbytes)) {
    634637        return errno;
    635638      }
  • trunk/source/lisp-kernel/lisp_globals.h

    r15257 r15366  
    2727#define KERNEL_IMPORTS (-4)     /* some things we need to have imported for us. */
    2828#define OBJC_2_PERSONALITY (-5) /* A good listener.  Doesn't say much */
     29#ifdef X86
     30#define MANAGED_STATIC_REFBITS (-6) /* refs from managed_static to dynamic */
     31#define MANAGED_STATIC_DNODES (-7) /* ndnodes in managed_static_area */
     32#else
    2933#define SAVETOC (-6)            /* Saved TOC register, for some platforms */
    3034#define SAVER13 (-7)            /* Saved (global) r13, on some platforms */
     35#endif
    3136#define SUBPRIMS_BASE (-8)      /* where the dynamic subprims wound up */
    3237#define RET1VALN (-9)           /* magic multiple-values return address */
  • trunk/source/lisp-kernel/memory.c

    r14295 r15366  
    266266MapMemory(LogicalAddress addr, natural nbytes, int protection)
    267267{
     268  LogicalAddress p;
    268269#if DEBUG_MEMORY
    269270  fprintf(dbgout, "Mapping memory at 0x" LISP ", size 0x" LISP "\n", addr, nbytes);
    270271#endif
    271272#ifdef WINDOWS
    272   return VirtualAlloc(addr, nbytes, MEM_RESERVE|MEM_COMMIT, MEMPROTECT_RWX);
     273  p = VirtualAlloc(addr, nbytes, MEM_RESERVE|MEM_COMMIT, MEMPROTECT_RWX);
     274  if (p == NULL) {
     275    wperror("MapMemory");
     276  }
     277  return p;
    273278#else
    274279  {
     
    946951  }
    947952}
     953
     954LogicalAddress
     955ReserveMemory(natural size)
     956{
     957  LogicalAddress p;
     958#ifdef WINDOWS
     959  p = VirtualAlloc(0,
     960                   size,
     961                   MEM_RESERVE,
     962                   PAGE_NOACCESS);
     963  return p;
     964#else
     965  p = mmap(NULL,size,PROT_NONE,MAP_PRIVATE|MAP_ANON|MAP_NORESERVE,-1,0);
     966  if (p == MAP_FAILED) {
     967    return NULL;
     968  }
     969  return p;
     970#endif
     971}
     972
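
ReserveMemory only reserves address space (MEM_RESERVE/PAGE_NOACCESS on
Windows, PROT_NONE mmap elsewhere); pages must still be committed before
they are touched.  A hedged usage sketch in the style of the callers
elsewhere in this changeset (reserve_bitvector is an illustrative name,
not part of the source):

    /* Sketch: reserve address space for a bitvector up front; commit it
       later, when the area it covers is actually populated.  Committed
       pages read as zeros, so no explicit clearing is needed. */
    static bitvector
    reserve_bitvector(natural nbytes)
    {
      bitvector bits = (bitvector) ReserveMemory(nbytes);
      if (bits == NULL) {
        perror("reserve bitvector");   /* wperror() on Windows */
        exit(1);
      }
      return bits;
    }

    /* ... before first use: CommitMemory(bits, nbytes); */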
  • trunk/source/lisp-kernel/memprotect.h

    r14295 r15366  
    5252LogicalAddress
    5353ReserveMemoryForHeap(LogicalAddress want, natural totalsize);
     54LogicalAddress
     55ReserveMemory(natural totalsize);
    5456
    5557int
  • trunk/source/lisp-kernel/pmcl-kernel.c

    r15271 r15366  
    553553  if (start == NULL) {
    554554    if (fatal) {
     555#ifdef WINDOWS
     556      wperror("minimal initial mmap");
     557#else
    555558      perror("minimal initial mmap");
     559#endif
    556560      exit(1);
    557561    }
     
    584588  reserved->pred = reserved->succ = reserved;
    585589  all_areas = reserved;
     590#ifdef X86
     591  {
     592    managed_static_refbits = ReserveMemory((((MANAGED_STATIC_SIZE>>dnode_shift)+7)>>3));
     593    if (managed_static_refbits == NULL) {
     594#ifdef WINDOWS
     595      wperror("allocate refbits for managed static area");
     596#else
     597      perror("allocate refbits for managed static area");
     598#endif
     599      exit(1);
     600    }
     601  }
     602#endif
    586603  return reserved;
    587604}
     
    20952112    lisp_global(OLDSPACE_DNODE_COUNT) = area_dnode(managed_static_area->active,managed_static_area->low);
    20962113  }
     2114#ifdef X86
     2115  lisp_global(MANAGED_STATIC_REFBITS) = (LispObj)managed_static_refbits;
     2116  lisp_global(MANAGED_STATIC_DNODES) = (LispObj)managed_static_area->ndnodes;
     2117#endif
    20972118  atexit(lazarus);
    20982119#ifdef ARM
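
The size passed to ReserveMemory above is one bit per dnode of the managed
static area, rounded up to whole bytes.  Assuming the x8664 layout, where a
dnode is two 8-byte Lisp words (so dnode_shift is 4), the arithmetic looks
like this (the 16 MB figure is purely illustrative):

    /* Sketch of the refbits sizing arithmetic. */
    natural area_bytes   = MANAGED_STATIC_SIZE;        /* bytes covered       */
    natural ndnodes      = area_bytes >> dnode_shift;  /* one dnode = 2 words */
    natural refbit_bytes = (ndnodes + 7) >> 3;         /* 1 bit per dnode,
                                                          rounded up to bytes */
    /* e.g. a 16 MB managed static area:
       (16 MB >> 4) = 1M dnodes; (1M + 7) >> 3 = 128 KB of refbits. */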
  • trunk/source/lisp-kernel/x86-constants.s

    r14652 r15366  
    7272         _node(initial_tcr)             /* initial thread tcr */
    7373         _node(image_name)              /* --image-name argument */
    74          _node(BADfpscr_save_high)      /* high word of FP reg used to save FPSCR */
     74         _node(weak_gc_method)          /* for weak vector marking */
    7575         _node(unwind_resume)           /* _Unwind_Resume */
    7676         _node(batch_flag)              /* -b */
     
    111111         _node(ret1val_addr)            /* address of "dynamic" subprims magic values return addr   */
    112112         _node(subprims_base)           /* address of dynamic subprims jump table   */
     113        __ifdef(`X86')
     114         _node(managed_static_dnodes)   /* ndnodes of managed_static_area */
     115         _node(managed_static_refbits)  /* refs from managed_static to dynamic */
     116        __else               
    113117         _node(saveR13)                 /* probably don't really need this   */
    114118         _node(saveTOC)                 /* where the 68K emulator stores the  emulated regs   */
     119        __endif       
    115120         _node(objc_2_personality)              /* exception "personality routine" address for ObjC 2.0 */
    116121         _node(kernel_imports)          /* some things we need imported for us   */
  • trunk/source/lisp-kernel/x86-exceptions.c

    r15194 r15366  
    27252725    }
    27262726    if (need_check_memo) {
    2727       natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
    2728       if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
    2729           ((LispObj)ea < val)) {
    2730         atomic_set_bit(refbits, bitnumber);
    2731         if (need_memoize_root) {
    2732           bitnumber = area_dnode(root, lisp_global(REF_BASE));
     2727      if ((LispObj)ea < val) {
     2728        natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE)),
     2729          rootbitnumber = area_dnode(root, lisp_global(REF_BASE));
     2730        if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT))) {
    27332731          atomic_set_bit(refbits, bitnumber);
     2732          if (need_memoize_root) {
     2733            atomic_set_bit(refbits, rootbitnumber);
     2734          }
     2735        }
     2736        if (bitnumber < lisp_global(MANAGED_STATIC_DNODES)) {
     2737          atomic_set_bit(managed_static_refbits,bitnumber);
     2738          if (need_memoize_root) {
     2739            atomic_set_bit(managed_static_refbits, rootbitnumber);
     2740          }
    27342741        }
    27352742      }
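
With the diff markup stripped away, the new logic reads roughly as follows
(a paraphrase, not additional source; refbits is the EGC refbits bitvector
already bound earlier in the handler):

    /* Paraphrase of the new memoization logic: stores whose value lies
       above the updated cell are the potentially interesting ones, and
       each such store may need to be visible to both GCs. */
    if ((LispObj)ea < val) {
      natural bitnumber     = area_dnode(ea,   lisp_global(REF_BASE));
      natural rootbitnumber = area_dnode(root, lisp_global(REF_BASE));

      if (bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) {
        atomic_set_bit(refbits, bitnumber);                  /* for the EGC */
        if (need_memoize_root) {
          atomic_set_bit(refbits, rootbitnumber);
        }
      }
      if (bitnumber < lisp_global(MANAGED_STATIC_DNODES)) {
        atomic_set_bit(managed_static_refbits, bitnumber);   /* for the full GC */
        if (need_memoize_root) {
          atomic_set_bit(managed_static_refbits, rootbitnumber);
        }
      }
    }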
  • trunk/source/lisp-kernel/x86-gc.c

    r14992 r15366  
    12451245  int mark_method = 3;
    12461246
    1247   if (GCDebug) {
    1248     check_refmap_consistency(p, p+(num_memo_dnodes << 1), refbits);
    1249   }
    1250 
    1251   /* The distinction between "inbits" and "outbits" is supposed to help us
    1252      detect cases where "uninteresting" setfs have been memoized.  Storing
    1253      NIL, fixnums, immediates (characters, etc.) or node pointers to static
    1254      or readonly areas is definitely uninteresting, but other cases are
    1255      more complicated (and some of these cases are hard to detect.)
    1256 
    1257      Some headers are "interesting", to the forwarder if not to us.
    1258 
    1259   */
    1260 
    1261   /*
    1262     We need to ensure that there are no bits set at or beyond
    1263     "num_memo_dnodes" in the bitvector.  (This can happen as the EGC
    1264     tenures/untenures things.)  We find bits by grabbing a fullword at
    1265     a time and doing a cntlzw instruction; and don't want to have to
    1266     check for (< memo_dnode num_memo_dnodes) in the loop.
    1267   */
    1268 
    1269   {
    1270     natural
    1271       bits_in_last_word = (num_memo_dnodes & bitmap_shift_count_mask),
    1272       index_of_last_word = (num_memo_dnodes >> bitmap_shift);
    1273 
    1274     if (bits_in_last_word != 0) {
    1275       natural mask = ~((NATURAL1<<(nbits_in_word-bits_in_last_word))- NATURAL1);
    1276       refbits[index_of_last_word] &= mask;
    1277     }
    1278   }
     1247  if (num_memo_dnodes) {
     1248    if (GCDebug) {
     1249      check_refmap_consistency(p, p+(num_memo_dnodes << 1), refbits);
     1250    }
     1251
     1252    /* The distinction between "inbits" and "outbits" is supposed to help us
     1253       detect cases where "uninteresting" setfs have been memoized.  Storing
     1254       NIL, fixnums, immediates (characters, etc.) or node pointers to static
     1255       or readonly areas is definitely uninteresting, but other cases are
     1256       more complicated (and some of these cases are hard to detect.)
     1257
     1258       Some headers are "interesting", to the forwarder if not to us.
     1259
     1260    */
     1261
     1262    /*
     1263      We need to ensure that there are no bits set at or beyond
     1264      "num_memo_dnodes" in the bitvector.  (This can happen as the EGC
     1265      tenures/untenures things.)  We find bits by grabbing a fullword at
     1266      a time and doing a cntlzw instruction; and don't want to have to
     1267      check for (< memo_dnode num_memo_dnodes) in the loop.
     1268    */
     1269
     1270    {
     1271      natural
     1272        bits_in_last_word = (num_memo_dnodes & bitmap_shift_count_mask),
     1273        index_of_last_word = (num_memo_dnodes >> bitmap_shift);
     1274
     1275      if (bits_in_last_word != 0) {
     1276        natural mask = ~((NATURAL1<<(nbits_in_word-bits_in_last_word))- NATURAL1);
     1277        refbits[index_of_last_word] &= mask;
     1278      }
     1279    }
    12791280       
    1280   set_bitidx_vars(refbits, 0, bitsp, bits, bitidx);
    1281   inbits = outbits = bits;
    1282   while (memo_dnode < num_memo_dnodes) {
    1283     if (bits == 0) {
    1284       int remain = nbits_in_word - bitidx;
    1285       memo_dnode += remain;
    1286       p += (remain+remain);
    1287       if (outbits != inbits) {
    1288         *bitsp = outbits;
    1289       }
    1290       bits = *++bitsp;
    1291       inbits = outbits = bits;
    1292       bitidx = 0;
    1293     } else {
    1294       nextbit = count_leading_zeros(bits);
    1295       if ((diff = (nextbit - bitidx)) != 0) {
    1296         memo_dnode += diff;
    1297         bitidx = nextbit;
    1298         p += (diff+diff);
    1299       }
    1300       x1 = *p++;
    1301       x2 = *p++;
    1302       bits &= ~(BIT0_MASK >> bitidx);
    1303 
    1304 
    1305       if (hashp) {
    1306         Boolean force_x1 = false;
    1307         if ((memo_dnode >= hash_dnode_limit) && (mark_method == 3)) {
    1308           /* if vector_header_count is odd, x1 might be the last word of the header */
    1309           force_x1 = (hash_table_vector_header_count & 1) && (memo_dnode == hash_dnode_limit);
    1310           /* was marking header, switch to data */
    1311           hash_dnode_limit = area_dnode(((LispObj *)hashp)
    1312                                         + 1
    1313                                         + header_element_count(hashp->header),
    1314                                         a->low);
    1315           /* In traditional weak method, don't mark vector entries at all. */
    1316           /* Otherwise mark the non-weak elements only */
    1317           mark_method = ((lisp_global(WEAK_GC_METHOD) == 0) ? 0 :
    1318                          ((hashp->flags & nhash_weak_value_mask)
    1319                           ? (1 + (hash_table_vector_header_count & 1))
    1320                           : (2 - (hash_table_vector_header_count & 1))));
     1281    set_bitidx_vars(refbits, 0, bitsp, bits, bitidx);
     1282    inbits = outbits = bits;
     1283    while (memo_dnode < num_memo_dnodes) {
     1284      if (bits == 0) {
     1285        int remain = nbits_in_word - bitidx;
     1286        memo_dnode += remain;
     1287        p += (remain+remain);
     1288        if (outbits != inbits) {
     1289          *bitsp = outbits;
    13211290        }
    1322 
    1323         if (memo_dnode < hash_dnode_limit) {
    1324           /* perhaps ignore one or both of the elements */
    1325           if (!force_x1 && !(mark_method & 1)) x1 = 0;
    1326           if (!(mark_method & 2)) x2 = 0;
    1327         } else {
    1328           hashp = NULL;
     1291        bits = *++bitsp;
     1292        inbits = outbits = bits;
     1293        bitidx = 0;
     1294      } else {
     1295        nextbit = count_leading_zeros(bits);
     1296        if ((diff = (nextbit - bitidx)) != 0) {
     1297          memo_dnode += diff;
     1298          bitidx = nextbit;
     1299          p += (diff+diff);
    13291300        }
    1330       }
    1331 
    1332       if (header_subtag(x1) == subtag_hash_vector) {
    1333         if (hashp) Bug(NULL, "header inside hash vector?");
    1334         hash_table_vector_header *hp = (hash_table_vector_header *)(p - 2);
    1335         if (hp->flags & nhash_weak_mask) {
    1336           /* Work around the issue that seems to cause ticket:817,
    1337              which is that tenured hash vectors that are weak on value
    1338              aren't always maintained on GCweakvll.  If they aren't and
    1339              we process them weakly here, nothing will delete the unreferenced
    1340              elements. */
    1341           if (!(hp->flags & nhash_weak_value_mask)) {
    1342             /* If header_count is odd, this cuts off the last header field */
    1343             /* That case is handled specially above */
    1344             hash_dnode_limit = memo_dnode + ((hash_table_vector_header_count) >>1);
    1345             hashp = hp;
    1346             mark_method = 3;
    1347 
    1348 
    1349 
    1350 
    1351 
     1301        x1 = *p++;
     1302        x2 = *p++;
     1303        bits &= ~(BIT0_MASK >> bitidx);
     1304
     1305
     1306        if (hashp) {
     1307          Boolean force_x1 = false;
     1308          if ((memo_dnode >= hash_dnode_limit) && (mark_method == 3)) {
     1309            /* if vector_header_count is odd, x1 might be the last word of the header */
     1310            force_x1 = (hash_table_vector_header_count & 1) && (memo_dnode == hash_dnode_limit);
     1311            /* was marking header, switch to data */
     1312            hash_dnode_limit = area_dnode(((LispObj *)hashp)
     1313                                          + 1
     1314                                          + header_element_count(hashp->header),
     1315                                          a->low);
     1316            /* In traditional weak method, don't mark vector entries at all. */
     1317            /* Otherwise mark the non-weak elements only */
     1318            mark_method = ((lisp_global(WEAK_GC_METHOD) == 0) ? 0 :
     1319                           ((hashp->flags & nhash_weak_value_mask)
     1320                            ? (1 + (hash_table_vector_header_count & 1))
     1321                            : (2 - (hash_table_vector_header_count & 1))));
     1322          }
     1323
     1324          if (memo_dnode < hash_dnode_limit) {
     1325            /* perhaps ignore one or both of the elements */
     1326            if (!force_x1 && !(mark_method & 1)) x1 = 0;
     1327            if (!(mark_method & 2)) x2 = 0;
     1328          } else {
     1329            hashp = NULL;
    13521330          }
    13531331        }
    1354       }
    1355 
    1356       keep_x1 = mark_ephemeral_root(x1);
    1357       keep_x2 = mark_ephemeral_root(x2);
    1358       if ((keep_x1 == false) &&
    1359           (keep_x2 == false) &&
    1360           (hashp == NULL)) {
    1361         outbits &= ~(BIT0_MASK >> bitidx);
    1362       }
    1363       memo_dnode++;
    1364       bitidx++;
    1365     }
    1366   }
    1367   if (GCDebug) {
    1368     p = (LispObj *) a->low;
    1369     check_refmap_consistency(p, p+(num_memo_dnodes << 1), refbits);
     1332
     1333        if (header_subtag(x1) == subtag_hash_vector) {
     1334          if (hashp) Bug(NULL, "header inside hash vector?");
     1335          hash_table_vector_header *hp = (hash_table_vector_header *)(p - 2);
     1336          if (hp->flags & nhash_weak_mask) {
     1337            /* Work around the issue that seems to cause ticket:817,
     1338               which is that tenured hash vectors that are weak on value
     1339               aren't always maintained on GCweakvll.  If they aren't and
     1340               we process them weakly here, nothing will delete the unreferenced
     1341               elements. */
     1342            if (!(hp->flags & nhash_weak_value_mask)) {
     1343              /* If header_count is odd, this cuts off the last header field */
     1344              /* That case is handled specially above */
     1345              hash_dnode_limit = memo_dnode + ((hash_table_vector_header_count) >>1);
     1346              hashp = hp;
     1347              mark_method = 3;
     1348
     1349
     1350
     1351
     1352
     1353            }
     1354          }
     1355        }
     1356
     1357        keep_x1 = mark_ephemeral_root(x1);
     1358        keep_x2 = mark_ephemeral_root(x2);
     1359        if ((keep_x1 == false) &&
     1360            (keep_x2 == false) &&
     1361            (hashp == NULL)) {
     1362          outbits &= ~(BIT0_MASK >> bitidx);
     1363        }
     1364        memo_dnode++;
     1365        bitidx++;
     1366      }
     1367    }
     1368    if (GCDebug) {
     1369      p = (LispObj *) a->low;
     1370      check_refmap_consistency(p, p+(num_memo_dnodes << 1), refbits);
     1371    }
    13701372  }
    13711373}
     
    15461548  if (node_regs_mask & (1<<1)) mark_root(regs[REG_ECX]);
    15471549  if (regs[REG_EFL] & EFL_DF) {
     1550    Bug(NULL, "Direction Flag set!");
    15481551    /* DF set means EDX should be treated as an imm reg */
    15491552    ;
     
    27582761    *base = start, *prev = start;
    27592762  int tag;
    2760   bitvector refbits = a->refbits;
     2763  bitvector refbits = managed_static_refbits;
    27612764  natural ref_dnode, node_dnode;
    27622765  Boolean intergen_ref;
     
    29052908       
    29062909        managed_static_area->ndnodes = managed_dnodes;
     2910        lisp_global(MANAGED_STATIC_DNODES) = managed_dnodes;
    29072911        CommitMemory(managed_static_area->refbits, refbytes); /* zeros them */
     2912        CommitMemory(managed_static_refbits,refbytes); /* zeroes them, too */
    29082913        update_managed_refs(managed_static_area, low_markable_address, area_dnode(a->active,low_markable_address));
    29092914      }
     
    32523257  impurify_from_area(tcr, readonly_area);
    32533258  impurify_from_area(tcr, managed_static_area);
     3259  lisp_global(MANAGED_STATIC_DNODES)=0;
    32543260  lisp_global(IN_GC)=0;
    32553261  return 0;
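
The comment block retained above (about clearing bits at or beyond
num_memo_dnodes) is easier to follow with concrete numbers.  A worked
sketch, assuming 64-bit words and the GC's MSB-first bit numbering (bit 0
of a word is its most significant bit); the value 70 is purely
illustrative:

    /* Worked example of the trailing-bit mask:
         num_memo_dnodes    = 70
         bits_in_last_word  = 70 & 63  = 6
         index_of_last_word = 70 >> 6  = 1
         mask               = ~((NATURAL1 << (64 - 6)) - NATURAL1)
                            = 0xFC00000000000000      (top 6 bits set)
       refbits[1] &= mask keeps only the bits for dnodes 64..69 and clears
       any stale bits at or beyond num_memo_dnodes, so the scan loop never
       has to range-check memo_dnode against num_memo_dnodes. */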
  • trunk/source/lisp-kernel/x86-spentry32.s

    r15295 r15366  
    17151715        __(shrl $dnode_shift,%imm0)
    17161716        __(cmpl lisp_global(oldspace_dnode_count),%imm0)
    1717         __(jae 0b)
     1717        __(jae 2f)
    17181718        __(ref_global(refbits,%temp0))
    17191719        __(xorb $31,%imm0_b)
    17201720        __(lock)
    17211721        __(btsl %imm0,(%temp0))
     1722        __(xorb $31,%imm0_b)
     17232:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1724        __(jae 0b)
     1725        __(ref_global(managed_static_refbits,%temp0))
     1726        __(xorb $31,%imm0_b)
     1727        __(lock)
     1728        __(btsl %imm0,(%temp0))       
    17221729        __(ret)
    17231730_endsubp(rplaca)
     
    17341741        __(shrl $dnode_shift,%imm0)
    17351742        __(cmpl lisp_global(oldspace_dnode_count),%imm0)
    1736         __(jae 0b)
     1743        __(jae 2f)
    17371744        __(ref_global(refbits,%temp0))
    17381745        __(xorb $31,%imm0_b)
    17391746        __(lock)
    17401747        __(btsl %imm0,(%temp0))
     1748        __(xorb $31,%imm0_b)       
     17492:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1750        __(jae 0b)
     1751        __(ref_global(managed_static_refbits,%temp0))
     1752        __(xorb $31,%imm0_b)
     1753        __(lock)
     1754        __(btsl %imm0,(%temp0))       
    17411755        __(ret)
    17421756_endsubp(rplacd)
     
    17551769        __(shrl $dnode_shift,%imm0)
    17561770        __(cmpl lisp_global(oldspace_dnode_count),%imm0)
    1757         __(jae 0b)
     1771        __(jae 2f)
    17581772        __(ref_global(refbits,%temp1))
    17591773        __(xorb $31,%imm0_b)
    17601774        __(lock)
    17611775        __(btsl %imm0,(%temp1))
     1776        __(xorb $31,%imm0_b)
     17772:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1778        __(jae 0b)
     1779        __(ref_global(managed_static_refbits,%temp1))
     1780        __(xorb $31,%imm0_b)
     1781        __(lock)
     1782        __(btsl %imm0,(%temp1))
    17621783        __(ret)
    17631784_endsubp(gvset)
     
    17781799        __(shrl $dnode_shift,%imm0)
    17791800        __(cmpl lisp_global(oldspace_dnode_count),%imm0)
    1780         __(jae 0b)
     1801        __(jae 2f)
    17811802        __(ref_global(refbits,%temp1))
    17821803        __(xorb $31,%imm0_b)
     
    17901811        __(lock)
    17911812        __(btsl %imm0,(%temp1))
     1813        __(lea misc_data_offset(%temp0,%arg_y),%imm0)
     1814        __(subl lisp_global(ref_base),%imm0)
     1815        __(shrl $dnode_shift,%imm0)
     18162:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1817        __(jae 0b)
     1818        __(ref_global(managed_static_refbits,%temp1))
     1819        __(xorb $31,%imm0_b)
     1820        __(lock)
     1821        __(btsl %imm0,(%temp1))
     1822        /* Now memoize the address of the hash vector */
     1823        __(movl %temp0,%imm0)
     1824        __(subl lisp_global(ref_base),%imm0)
     1825        __(shrl $dnode_shift,%imm0)
     1826        __(xorb $31,%imm0_b)
     1827        __(lock)
     1828        __(btsl %imm0,(%temp1))               
    17921829        __(ret)
    17931830_endsubp(set_hash_key)
     
    181018470:      __(cmpl %arg_y,misc_data_offset(%temp1,%temp0))
    18111848        __(movl misc_data_offset(%temp1,%temp0),%imm0)
    1812         __(jne 3f)
     1849        __(jne 9f)
    18131850        __(lock)
    18141851        __(cmpxchgl %arg_z,misc_data_offset(%temp1,%temp0))
     
    18251862        __(lock)
    18261863        __(btsl %imm0,(%temp1))
     1864        __(xorb $31,%imm0_b)
     18652:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1866        __(jae 8f)
     1867        __(ref_global(managed_static_refbits,%temp1))
     1868        __(xorb $31,%imm0_b)
     1869        __(lock)
     1870        __(btsl %imm0,(%temp1))
    18271871        .globl C(egc_store_node_conditional_success_end)
    18281872C(egc_store_node_conditional_success_end):
    1829 2:      __(movl $t_value,%arg_z)
    1830         __(ret)
    1831 3:      __(movl $nil_value,%arg_z)
     18738:      __(movl $t_value,%arg_z)
     1874        __(ret)
     18759:      __(movl $nil_value,%arg_z)
    18321876        __(ret)
    18331877_endsubp(store_node_conditional)
     
    184318870:      __(cmpl %arg_y,misc_data_offset(%temp1,%temp0))
    18441888        __(movl misc_data_offset(%temp1,%temp0),%imm0)
    1845         __(jne 3f)
     1889        __(jne 9f)
    18461890        __(lock)
    18471891        __(cmpxchgl %arg_z,misc_data_offset(%temp1,%temp0))
     
    18541898        __(cmpl lisp_global(oldspace_dnode_count),%imm0)
    18551899        __(jae 2f)
    1856         __(ref_global(refbits,%temp0))
     1900        __(ref_global(refbits,%arg_y))
    18571901        __(xorb $31,%imm0_b)
    18581902        __(lock)
    1859         __(btsl %imm0,(%temp0))
     1903        __(btsl %imm0,(%arg_y))
    18601904        /* Now memoize the address of the hash vector */
    18611905        __(movl %temp1,%imm0)
     
    18641908        __(xorb $31,%imm0_b)
    18651909        __(lock)
    1866         __(btsl %imm0,(%temp0))
     1910        __(btsl %imm0,(%arg_y))
     1911        __(leal misc_data_offset(%temp1,%temp0),%imm0)
     1912        __(subl lisp_global(ref_base),%imm0)
     1913        __(shrl $dnode_shift,%imm0)
     19142:      __(cmpl lisp_global(managed_static_dnodes),%imm0)
     1915        __(jae 8f)
     1916        __(ref_global(managed_static_refbits,%arg_y))
     1917        __(xorb $31,%imm0_b)
     1918        __(lock)
     1919        __(btsl %imm0,(%arg_y))
     1920        /* Now memoize the address of the hash vector */
     1921        __(movl %temp1,%imm0)
     1922        __(subl lisp_global(ref_base),%imm0)
     1923        __(shrl $dnode_shift,%imm0)
     1924        __(xorb $31,%imm0_b)
     1925        __(lock)
     1926        __(btsl %imm0,(%arg_y))
    18671927        .globl C(egc_write_barrier_end)
    18681928C(egc_write_barrier_end):
    1869 2:      __(movl $t_value,%arg_z)
    1870         __(ret)
    1871 3:      __(movl $nil_value,%arg_z)
     19298:      __(movl $t_value,%arg_z)
     1930        __(ret)
     19319:      __(movl $nil_value,%arg_z)
    18721932        __(ret)
    18731933_endsubp(store_node_conditional)
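
Each of the new barrier sequences repeats the same idiom: compute the dnode
index, compare it against a dnode count, then `xorb $31,%imm0_b` and
`lock btsl` into the appropriate bitvector.  The xor reflects the
within-word bit position so that BTS (which numbers bits from the least
significant end) sets the same bit the C GC code addresses with
`BIT0_MASK >> bitidx` (most significant end first).  A hedged C equivalent
of one such sequence for the 32-bit kernel (set_refbit_32 is an
illustrative name; the 64-bit code uses `xorb $63` and `btsq` the same
way):

    /* Sketch: what one "xorb $31 / lock btsl" pair does, minus atomicity. */
    static void
    set_refbit_32(bitvector bits, natural dnode)
    {
      natural word   = dnode >> 5;          /* 32 bits per word             */
      natural bitpos = 31 - (dnode & 31);   /* the reflection xorb performs */
      bits[word] |= ((natural)1 << bitpos); /* lock btsl makes this atomic  */
    }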
  • trunk/source/lisp-kernel/x86-spentry64.s

    r15295 r15366  
    17701770        __(subq lisp_global(ref_base),%imm0)
    17711771        __(shrq $dnode_shift,%imm0)
     1772        __(movq %imm0,%imm1)
    17721773        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    1773         __(jae 0b)
     1774        __(jae 2f)
    17741775        __(ref_global(refbits,%temp0))
    17751776        __(xorb $63,%imm0_b)
    17761777        __(lock)
    17771778        __(btsq %imm0,(%temp0))
     17792:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1780        __(jae 0b)
     1781        __(ref_global(managed_static_refbits,%temp0))
     1782        __(xorb $63,%imm1_b)
     1783        __(lock)
     1784        __(btsq %imm1,(%temp0))
    17781785        __(ret)
    17791786_endsubp(rplaca)
     
    17891796        __(subq lisp_global(ref_base),%imm0)
    17901797        __(shrq $dnode_shift,%imm0)
     1798        __(movq %imm0,%imm1)
    17911799        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    1792         __(jae 0b)
     1800        __(jae 2f)
    17931801        __(ref_global(refbits,%temp0))
    17941802        __(xorb $63,%imm0_b)
     
    17961804        __(btsq %imm0,(%temp0))
    17971805        __(ret)
     18062:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1807        __(jae 0b)
     1808        __(ref_global(managed_static_refbits,%temp0))
     1809        __(xorb $63,%imm1_b)
     1810        __(lock)
     1811        __(btsq %imm1,(%temp0))
     1812        __(ret)       
    17981813_endsubp(rplacd)
    17991814
     
    18111826        __(subq lisp_global(ref_base),%imm0)
    18121827        __(shrq $dnode_shift,%imm0)
     1828        __(movq %imm0,%imm1)
    18131829        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    1814         __(jae 0b)
     1830        __(jae 2f)
    18151831        __(ref_global(refbits,%temp0))
    18161832        __(xorb $63,%imm0_b)
    1817         __(lock) 
     1833        __(lock)
    18181834        __(btsq %imm0,(%temp0))
     18352:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1836        __(jae 0b)
     1837        __(ref_global(managed_static_refbits,%temp0))
     1838        __(xorb $63,%imm1_b)
     1839        __(lock)
     1840        __(btsq %imm1,(%temp0))       
    18191841        __(ret)               
    18201842_endsubp(gvset)
     
    18351857        __(subq lisp_global(ref_base),%imm0)
    18361858        __(shrq $dnode_shift,%imm0)
     1859        __(movq %imm0,%imm1)
    18371860        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    1838         __(jae 0b)
     1861        __(jae 2f)
    18391862        __(ref_global(refbits,%temp0))
    18401863        __(xorb $63,%imm0_b)
     
    18481871        __(lock)
    18491872        __(btsq %imm0,(%temp0))
    1850         __(ret)               
     18732:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1874        __(jae 0b)
     1875        __(ref_global(managed_static_refbits,%temp0))
     1876        __(xorb $63,%imm1_b)
     1877        __(lock)
     1878        __(btsq %imm1,(%temp0))
     1879        /* Now memoize the address of the hash vector   */
     1880        __(movq %arg_x,%imm0)
     1881        __(subq lisp_global(ref_base),%imm0)
     1882        __(shrq $dnode_shift,%imm0)
     1883        __(xorb $63,%imm0_b)
     1884        __(lock)
     1885        __(btsq %imm0,(%temp0))
     1886        __(ret)
    18511887_endsubp(set_hash_key)
    18521888
     
    18661902        __(cmpq %arg_y,%temp1)
    18671903        __(movq %temp1,%imm0)
    1868         __(jne 3f)
     1904        __(jne 9f)
    18691905        __(lock)
    18701906        __(cmpxchgq %arg_z,(%arg_x,%imm1))
     
    18751911        __(subq lisp_global(ref_base),%imm0)
    18761912        __(shrq $dnode_shift,%imm0)
     1913        __(movq %imm0,%imm1)
    18771914        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    18781915        __(ref_global(refbits,%temp1))
     
    18811918        __(lock)
    18821919        __(btsq %imm0,(%temp1))
     19202:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1921        __(jae 8f)
     1922        __(ref_global(managed_static_refbits,%temp1))
     1923        __(xorb $63,%imm1_b)
     1924        __(lock)
     1925        __(btsq %imm1,(%temp1))       
    18831926        .globl C(egc_store_node_conditional_success_end)
    18841927C(egc_store_node_conditional_success_end):
    1885 2:      __(movl $t_value,%arg_z_l)
    1886         __(ret)
    1887 3:      __(movl $nil_value,%arg_z_l)
     19288:      __(movl $t_value,%arg_z_l)
     1929        __(ret)
     19309:      __(movl $nil_value,%arg_z_l)
    18881931        __(ret)
    18891932_endsubp(store_node_conditional)
     
    18981941        __(cmpq %arg_y,%temp1)
    18991942        __(movq %temp1,%imm0)
    1900         __(jne 3f)
     1943        __(jne 9f)
    19011944        __(lock)
    19021945        __(cmpxchgq %arg_z,(%arg_x,%imm1))
     
    19071950        __(subq lisp_global(ref_base),%imm0)
    19081951        __(shrq $dnode_shift,%imm0)
     1952        __(movq %imm0,%imm1)
    19091953        __(cmpq lisp_global(oldspace_dnode_count),%imm0)
    19101954        __(ref_global(refbits,%temp1))
     
    19201964        __(lock)
    19211965        __(btsq %imm0,(%temp1))
     19662:      __(cmpq lisp_global(managed_static_dnodes),%imm1)
     1967        __(jae 8f)
     1968        __(ref_global(managed_static_refbits,%temp1))
     1969        __(xorb $63,%imm1_b)
     1970        __(lock)
     1971        __(btsq %imm1,(%temp1))
     1972        /* Now memoize the address of the hash vector   */
     1973        __(movq %arg_x,%imm0)
     1974        __(subq lisp_global(ref_base),%imm0)
     1975        __(shrq $dnode_shift,%imm0)
     1976        __(xorb $63,%imm0_b)
     1977        __(lock)
     1978        __(btsq %imm0,(%temp1))
    19221979        .globl C(egc_write_barrier_end)
    19231980C(egc_write_barrier_end):
    1924 2:      __(movl $t_value,%arg_z_l)
    1925         __(ret)
    1926 3:      __(movl $nil_value,%arg_z_l)
     19818:      __(movl $t_value,%arg_z_l)
     1982        __(ret)
     19839:      __(movl $nil_value,%arg_z_l)
    19271984        __(ret)
    19281985_endsubp(set_hash_key_conditional)