source: trunk/source/lisp-kernel/arm-exceptions.c @ 16066

Last change on this file since 16066 was 16066, checked in by gb, 5 years ago

pending GC notification mechanism for ARM (has been on x86 for a few
years.)

(setq ccl:*pending-gc-notification-hook* some-0-arg-function)

(set-gc-notification-threshold N)

arranges to call the value of the hook function when free space before the
next full GC drops below N bytes.

File size: 52.9 KB
Line 
1/*
2   Copyright (C) 2010 Clozure Associates
3   This file is part of Clozure CL. 
4
5   Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
6   License , known as the LLGPL and distributed with Clozure CL as the
7   file "LICENSE".  The LLGPL consists of a preamble and the LGPL,
8   which is distributed with Clozure CL as the file "LGPL".  Where these
9   conflict, the preamble takes precedence. 
10
11   Clozure CL is referenced in the preamble as the "LIBRARY."
12
13   The LLGPL is also available online at
14   http://opensource.franz.com/preamble.html
15*/
16
17#include "lisp.h"
18#include "lisp-exceptions.h"
19#include "lisp_globals.h"
20#include <ctype.h>
21#include <stdio.h>
22#include <stddef.h>
23#include <string.h>
24#include <stdarg.h>
25#include <errno.h>
26#include <stdio.h>
27#ifdef LINUX
28#include <strings.h>
29#include <sys/mman.h>
30#ifndef ANDROID
31#include <fpu_control.h>
32#include <linux/prctl.h>
33#endif
34#endif
35
36#ifdef DARWIN
37#include <sys/mman.h>
38#ifndef SA_NODEFER
39#define SA_NODEFER 0
40#endif
41#include <sysexits.h>
42
43
44/* a distinguished UUO at a distinguished address */
45extern void pseudo_sigreturn(ExceptionInformation *);
46#endif
47
48
49#include "threads.h"
50
51#ifdef ANDROID
52#define pthread_sigmask(how,in,out) rt_sigprocmask(how,in,out,8)
53#endif
54
55#ifdef LINUX
56
/* FP-exception enabling/disabling are no-ops on Linux/ARM --
   presumably FP trap state is left alone on this port; confirm
   against the other platforms' implementations. */
void
enable_fp_exceptions()
{
}

void
disable_fp_exceptions()
{
}
66#endif
67
68/*
69  Handle exceptions.
70
71*/
72
/* Defined elsewhere in the kernel. */
extern LispObj lisp_nil;

extern natural lisp_heap_gc_threshold;
extern Boolean grow_dynamic_area(natural);

/* Cleared/set by GC_TRAP_FUNCTION_ALLOCATION_CONTROL (see
   handle_gc_trap); checked on every allocation trap. */
Boolean allocation_enabled = true;

/* True once the pending-GC-notification callback has fired; reset by
   a full GC (see allocate_object) and when a new notification
   threshold is installed, so the hook fires at most once per
   full-GC cycle. */
Boolean
did_gc_notification_since_last_full_gc = false;




/* Page geometry; assumed to be 4KB pages -- NOTE(review): confirm at
   runtime on systems with a different page size. */
int
page_size = 4096;

int
log2_page_size = 12;
95
/*
  If the PC is pointing to an allocation trap, the previous instruction
  must have decremented allocptr.  Return the non-zero amount by which
  allocptr was decremented.
*/
signed_natural
allocptr_displacement(ExceptionInformation *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter, prev_instr;
  int delta = -3;

  if (IS_ALLOC_TRAP(instr)) {
    /* The alloc trap must have been preceded by a cmp and a
       load from tcr.allocbase. */
    if (IS_BRANCH_AROUND_ALLOC_TRAP(program_counter[-1])) {
      /* An extra branch sits between the decrement and the trap,
         so the decrement is one word further back. */
      delta = -4;
    }
    prev_instr = program_counter[delta];

    if (IS_SUB_RM_FROM_ALLOCPTR(prev_instr)) {
      /* Register-operand subtract: displacement is the Rm register's
         current value. */
      return -((signed_natural)xpGPR(xp,RM_field(prev_instr)));
    }
   
    if (IS_SUB_LO_FROM_ALLOCPTR(prev_instr)) {
      /* Subtract of an 8-bit immediate. */
      return -((signed_natural)(prev_instr & 0xff));
    }

    if (IS_SUB_FROM_ALLOCPTR(prev_instr)) {
      /* Rotated-immediate subtract; the low byte of the displacement
         comes from a preceding SUB-lo instruction. */
      natural disp = ror(prev_instr&0xff,(prev_instr&0xf00)>>7);

      instr = program_counter[delta-1];
      if (IS_SUB_LO_FROM_ALLOCPTR(instr)) {
        return -((signed_natural)(disp | (instr & 0xff)));
      }
    }
    Bug(xp, "Can't determine allocation displacement");
  }
  return 0;
}
136
137
138/*
139  A cons cell's been successfully allocated, but the allocptr's
140  still tagged (as fulltag_cons, of course.)  Emulate any instructions
141  that might follow the allocation (stores to the car or cdr, an
142  assignment to the "result" gpr) that take place while the allocptr's
143  tag is non-zero, advancing over each such instruction.  When we're
144  done, the cons cell will be allocated and initialized, the result
145  register will point to it, the allocptr will be untagged, and
146  the PC will point past the instruction that clears the allocptr's
147  tag.
148*/
149void
150finish_allocating_cons(ExceptionInformation *xp)
151{
152  pc program_counter = xpPC(xp);
153  opcode instr;
154  LispObj cur_allocptr = xpGPR(xp, allocptr);
155  cons *c = (cons *)ptr_from_lispobj(untag(cur_allocptr));
156  int target_reg;
157
158  while (1) {
159    instr = *program_counter++;
160
161    if (IS_CLR_ALLOCPTR_TAG(instr)) {
162      xpGPR(xp, allocptr) = untag(cur_allocptr);
163      xpPC(xp) = program_counter;
164      return;
165    } else if (IS_SET_ALLOCPTR_CAR_RD(instr)) {
166      c->car = xpGPR(xp,RD_field(instr));
167    } else if (IS_SET_ALLOCPTR_CDR_RD(instr)) {
168      c->cdr = xpGPR(xp,RD_field(instr));
169    } else {
170      /* assert(IS_SET_ALLOCPTR_RESULT_RD(instr)) */
171      xpGPR(xp,RD_field(instr)) = cur_allocptr;
172    }
173  }
174}
175
176/*
177  We were interrupted in the process of allocating a uvector; we
178  survived the allocation trap, and allocptr is tagged as fulltag_misc.
179  Emulate any instructions which store a header into the uvector,
180  assign the value of allocptr to some other register, and clear
181  allocptr's tag.  Don't expect/allow any other instructions in
182  this environment.
183*/
184void
185finish_allocating_uvector(ExceptionInformation *xp)
186{
187  pc program_counter = xpPC(xp);
188  opcode instr;
189  LispObj cur_allocptr = xpGPR(xp, allocptr);
190  int target_reg;
191
192  while (1) {
193    instr = *program_counter++;
194    if (IS_CLR_ALLOCPTR_TAG(instr)) {
195      xpGPR(xp, allocptr) = untag(cur_allocptr);
196      xpPC(xp) = program_counter;
197      return;
198    }
199    if (IS_SET_ALLOCPTR_HEADER_RD(instr)) {
200      header_of(cur_allocptr) = xpGPR(xp,RD_field(instr));
201    } else if (IS_SET_ALLOCPTR_RESULT_RD(instr)) {
202      xpGPR(xp,RD_field(instr)) = cur_allocptr;
203    } else {
204      Bug(xp, "Unexpected instruction following alloc trap at " LISP ":",program_counter);
205    }
206  }
207}
208
209
/* Try to satisfy an allocation of bytes_needed bytes for tcr, doing
   an ephemeral or full GC first if necessary.  On success, bump
   allocptr by disp_from_allocptr and return true; crossed_threshold
   (if non-NULL) may be set by new_heap_segment() when the
   GC-notification threshold is crossed.  Returns false if the space
   can't be obtained even after a full GC and heap growth. */
Boolean
allocate_object(ExceptionInformation *xp,
                natural bytes_needed, 
                signed_natural disp_from_allocptr,
                TCR *tcr,
                Boolean *crossed_threshold)
{
  area *a = active_dynamic_area;

  /* Maybe do an EGC */
  if (a->older && lisp_global(OLDEST_EPHEMERAL)) {
    if (((a->active)-(a->low)) >= a->threshold) {
      gc_from_xp(xp, 0L);
    }
  }

  /* Life is pretty simple if we can simply grab a segment
     without extending the heap.
  */
  if (new_heap_segment(xp, bytes_needed, false, tcr, crossed_threshold)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  /* It doesn't make sense to try a full GC if the object
     we're trying to allocate is larger than everything
     allocated so far.
  */
  if ((lisp_global(HEAP_END)-lisp_global(HEAP_START)) > bytes_needed) {
    untenure_from_area(tenured_area); /* force a full GC */
    gc_from_xp(xp, 0L);
    /* A full GC re-arms the pending-GC notification. */
    did_gc_notification_since_last_full_gc = false;
  }
 
  /* Try again, growing the heap if necessary */
  if (new_heap_segment(xp, bytes_needed, true, tcr, NULL)) {
    xpGPR(xp, allocptr) += disp_from_allocptr;
    return true;
  }
 
  return false;
}
252
253#ifndef XNOMEM
254#define XNOMEM 10
255#endif
256
257void
258update_bytes_allocated(TCR* tcr, void *cur_allocptr)
259{
260  BytePtr
261    last = (BytePtr) tcr->last_allocptr, 
262    current = (BytePtr) cur_allocptr;
263  if (last && (cur_allocptr != ((void *)VOID_ALLOCPTR))) {
264    tcr->bytes_allocated += last-current;
265  }
266  tcr->last_allocptr = 0;
267}
268
269void
270lisp_allocation_failure(ExceptionInformation *xp, TCR *tcr, natural bytes_needed)
271{
272  /* Couldn't allocate the object.  If it's smaller than some arbitrary
273     size (say 128K bytes), signal a "chronically out-of-memory" condition;
274     else signal a "allocation request failed" condition.
275  */
276  xpGPR(xp,allocptr) = xpGPR(xp,allocbase) = VOID_ALLOCPTR;
277  handle_error(xp, bytes_needed < (128<<10) ? XNOMEM : error_alloc_failed,0, NULL);
278}
279
280void
281callback_for_gc_notification(ExceptionInformation *xp, TCR *tcr)
282{
283  LispObj cmain = nrs_CMAIN.vcell;
284
285  did_gc_notification_since_last_full_gc = true;
286  if ((fulltag_of(cmain) == fulltag_misc) &&
287      (header_subtag(header_of(cmain)) == subtag_macptr)) {
288    callback_to_lisp(cmain,xp,SIGTRAP,0,NULL);
289  }
290}
291
292/*
293  Allocate a large list, where "large" means "large enough to
294  possibly trigger the EGC several times if this was done
295  by individually allocating each CONS."  The number of
  conses in question is in arg_z; on successful return,
297  the list will be in arg_z
298*/
299
300Boolean
301allocate_list(ExceptionInformation *xp, TCR *tcr)
302{
303  natural
304    nconses = (unbox_fixnum(xpGPR(xp,arg_z))),
305    bytes_needed = (nconses << dnode_shift);
306  LispObj
307    prev = lisp_nil,
308    current,
309    initial = xpGPR(xp,arg_y);
310  Boolean notify_pending_gc = false;
311
312  if (nconses == 0) {
313    /* Silly case */
314    xpGPR(xp,arg_z) = lisp_nil;
315    return true;
316  }
317  update_bytes_allocated(tcr, (void *)(void *) tcr->save_allocptr);
318  if (allocate_object(xp,bytes_needed,(-bytes_needed)+fulltag_cons,tcr,&notify_pending_gc)) {
319    for (current = xpGPR(xp,allocptr);
320         nconses;
321         prev = current, current+= dnode_size, nconses--) {
322      deref(current,0) = prev;
323      deref(current,1) = initial;
324    }
325    xpGPR(xp,arg_z) = prev;
326    xpGPR(xp,arg_y) = xpGPR(xp,allocptr);
327    xpGPR(xp,allocptr)-=fulltag_cons;
328    if (notify_pending_gc && !did_gc_notification_since_last_full_gc) {
329      callback_for_gc_notification(xp,tcr);
330    }
331  } else {
332    lisp_allocation_failure(xp,tcr,bytes_needed);
333  }
334  return true;
335}
336
337Boolean
338handle_alloc_trap(ExceptionInformation *xp, TCR *tcr, Boolean *notify)
339{
340  pc program_counter;
341  natural cur_allocptr, bytes_needed = 0;
342  opcode prev_instr;
343  signed_natural disp = 0;
344  unsigned allocptr_tag;
345
346  if (!allocation_enabled) {
347    /* Back up before the alloc_trap, then let pc_luser_xp() back
348       up some more. */
349    xpPC(xp)-=1;
350    pc_luser_xp(xp,tcr, NULL);
351    allocation_enabled = true;
352    tcr->save_allocbase = (void *)VOID_ALLOCPTR;
353    handle_error(xp, error_allocation_disabled,0,NULL);
354    return true;
355  }
356
357  cur_allocptr = xpGPR(xp,allocptr);
358
359  allocptr_tag = fulltag_of(cur_allocptr);
360
361  switch (allocptr_tag) {
362  case fulltag_cons:
363    bytes_needed = sizeof(cons);
364    disp = -sizeof(cons) + fulltag_cons;
365    break;
366
367  case fulltag_misc:
368    disp = allocptr_displacement(xp);
369    bytes_needed = (-disp) + fulltag_misc;
370    break;
371
372    /* else fall thru */
373  default:
374    return false;
375  }
376
377  update_bytes_allocated(tcr,((BytePtr)(cur_allocptr-disp)));
378  if (allocate_object(xp, bytes_needed, disp, tcr, notify)) {
379    adjust_exception_pc(xp,4);
380    if (notify && *notify) {
381      pc_luser_xp(xp,tcr,NULL);
382      callback_for_gc_notification(xp,tcr);
383    }
384    return true;
385  }
386  lisp_allocation_failure(xp,tcr,bytes_needed);
387  return true;
388}
389
/* Counts of GC requests deferred because GC was inhibited (see
   gc_like_from_xp). */
natural gc_deferred = 0, full_gc_deferred = 0;

/* Helper passed to gc_like_from_xp() by the flash-freeze trap: all
   of the interesting work happens in the suspend/normalize
   machinery, so there is nothing to do here. */
signed_natural
flash_freeze(TCR *tcr, signed_natural param)
{
  return 0;
}
397
/* Dispatch a GC trap: selector (imm0) chooses the operation and arg
   (imm1) is its operand.  Results are returned in imm0 or arg_z.
   Always advances the PC past the trap and returns true. */
Boolean
handle_gc_trap(ExceptionInformation *xp, TCR *tcr)
{
  LispObj
    selector = xpGPR(xp,imm0), 
    arg = xpGPR(xp,imm1);
  area *a = active_dynamic_area;
  Boolean egc_was_enabled = (a->older != NULL);
  natural gc_previously_deferred = gc_deferred;


  switch (selector) {
  case GC_TRAP_FUNCTION_EGC_CONTROL:
    /* Enable (arg non-zero) or disable the EGC; return the previous
       state (T or NIL) in arg_z. */
    egc_control(arg != 0, a->active);
    xpGPR(xp,arg_z) = lisp_nil + (egc_was_enabled ? t_offset : 0);
    break;

  case GC_TRAP_FUNCTION_CONFIGURE_EGC:
    /* Install the three generation thresholds from arg_x/arg_y/arg_z. */
    a->threshold = unbox_fixnum(xpGPR(xp, arg_x));
    g1_area->threshold = unbox_fixnum(xpGPR(xp, arg_y));
    g2_area->threshold = unbox_fixnum(xpGPR(xp, arg_z));
    xpGPR(xp,arg_z) = lisp_nil+t_offset;
    break;

  case GC_TRAP_FUNCTION_SET_LISP_HEAP_THRESHOLD:
    if (((signed_natural) arg) > 0) {
      /* Round the requested threshold up to a whole heap segment. */
      lisp_heap_gc_threshold = 
        align_to_power_of_2((arg-1) +
                            (heap_segment_size - 1),
                            log2_heap_segment_size);
    }
    /* fall through */
  case GC_TRAP_FUNCTION_GET_LISP_HEAP_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_USE_LISP_HEAP_THRESHOLD:
    /*  Try to put the current threshold in effect.  This may
        need to disable/reenable the EGC. */
    untenure_from_area(tenured_area);
    resize_dynamic_heap(a->active,lisp_heap_gc_threshold);
    if (egc_was_enabled) {
      if ((a->high - a->active) >= a->threshold) {
        tenure_to_area(tenured_area);
      }
    }
    xpGPR(xp, imm0) = lisp_heap_gc_threshold;
    break;

  case GC_TRAP_FUNCTION_SET_GC_NOTIFICATION_THRESHOLD:
    /* Installing a new notification threshold re-arms the
       pending-GC-notification hook. */
    if ((signed_natural)arg >= 0) {
      lisp_heap_notify_threshold = arg;
      did_gc_notification_since_last_full_gc = false;
    }
    /* fall through */

  case GC_TRAP_FUNCTION_GET_GC_NOTIFICATION_THRESHOLD:
    xpGPR(xp, imm0) = lisp_heap_notify_threshold;
    break;


  case GC_TRAP_FUNCTION_ENSURE_STATIC_CONSES:
    ensure_static_conses(xp,tcr,32768);
    break;

  case GC_TRAP_FUNCTION_FLASH_FREEZE:
    /* Suspend/normalize all threads (flash_freeze itself is a no-op),
       then mark everything currently allocated as static. */
    untenure_from_area(tenured_area);
    gc_like_from_xp(xp,flash_freeze,0);
    a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
    tenured_area->static_dnodes = area_dnode(a->active, a->low);
    if (egc_was_enabled) {
      tenure_to_area(tenured_area);
    }
    xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
    break;

  case GC_TRAP_FUNCTION_ALLOCATION_CONTROL:
    switch(arg) {
    case 0: /* disable if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        TCR *other_tcr;
        ExceptionInformation *other_context;
        /* Bring every thread to a consistent (normalized) state
           before turning allocation off. */
        suspend_other_threads(true);
        normalize_tcr(xp,tcr,false);
        for (other_tcr=tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
          other_context = other_tcr->pending_exception_context;
          if (other_context == NULL) {
            other_context = other_tcr->suspend_context;
          }
          normalize_tcr(other_context, other_tcr, true);
        }
        allocation_enabled = false;
        xpGPR(xp, arg_z) = t_value;
        resume_other_threads(true);
      }
      break;

    case 1:                     /* enable if disabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (!allocation_enabled) {
        allocation_enabled = true;
        xpGPR(xp, arg_z) = t_value;
      }
      break;

    default:                    /* query: T if allocation enabled */
      xpGPR(xp, arg_z) = lisp_nil;
      if (allocation_enabled) {
        xpGPR(xp, arg_z) = t_value;
      }
      break;
    }
    break;

       
  default:
    /* The remaining selectors all run some flavor of full GC. */
    update_bytes_allocated(tcr, (void *) ptr_from_lispobj(xpGPR(xp, allocptr)));

    if (selector == GC_TRAP_FUNCTION_IMMEDIATE_GC) {
      if (!full_gc_deferred) {
        gc_from_xp(xp, 0L);
        break;
      }
      /* Tried to do a full GC when gc was disabled.  That failed,
         so try full GC now */
      selector = GC_TRAP_FUNCTION_GC;
    }
   
    if (egc_was_enabled) {
      egc_control(false, (BytePtr) a->active);
    }
    gc_from_xp(xp, 0L);
    if (gc_deferred > gc_previously_deferred) {
      full_gc_deferred = 1;
    } else {
      full_gc_deferred = 0;
    }
    if (selector > GC_TRAP_FUNCTION_GC) {
      /* Selector is a bit-mask of additional operations. */
      if (selector & GC_TRAP_FUNCTION_IMPURIFY) {
        impurify_from_xp(xp, 0L);
        /*        nrs_GC_EVENT_STATUS_BITS.vcell |= gc_integrity_check_bit; */
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_PURIFY) {
        purify_from_xp(xp, 0L);
        lisp_global(OLDSPACE_DNODE_COUNT) = 0;
        gc_from_xp(xp, 0L);
      }
      if (selector & GC_TRAP_FUNCTION_SAVE_APPLICATION) {
        OSErr err;
        extern OSErr save_application(unsigned, Boolean);
        TCR *tcr = get_tcr(true);
        area *vsarea = tcr->vs_area;
       
        nrs_TOPLFUNC.vcell = *((LispObj *)(vsarea->high)-1);
        err = save_application(arg, egc_was_enabled);
        if (err == noErr) {
          _exit(0);
        }
        fatal_oserr(": save_application", err);
      }
      switch (selector) {


      case GC_TRAP_FUNCTION_FREEZE:
        /* Mark everything currently allocated as static. */
        a->active = (BytePtr) align_to_power_of_2(a->active, log2_page_size);
        tenured_area->static_dnodes = area_dnode(a->active, a->low);
        xpGPR(xp, imm0) = tenured_area->static_dnodes << dnode_shift;
        break;
      default:
        break;
      }
    }
   
    if (egc_was_enabled) {
      egc_control(true, NULL);
    }
    break;
   
  }

  adjust_exception_pc(xp,4);
  return true;
}
584
585
586
void
signal_stack_soft_overflow(ExceptionInformation *xp, unsigned reg)
{
  /* The stack indicated by reg hit its soft-overflow guard page
     (which the caller has already unprotected); report an
     error_stack_overflow condition to lisp. */

#if 0
  lisp_global(CS_OVERFLOW_LIMIT) = CS_OVERFLOW_FORCE_LIMIT; /* force unsigned traps to fail */
#endif
  handle_error(xp, error_stack_overflow, reg, NULL);
}
600
601/*
602  Lower (move toward 0) the "end" of the soft protected area associated
603  with a by a page, if we can.
604*/
605
606void
607adjust_soft_protection_limit(area *a)
608{
609  char *proposed_new_soft_limit = a->softlimit - 4096;
610  protected_area_ptr p = a->softprot;
611 
612  if (proposed_new_soft_limit >= (p->start+16384)) {
613    p->end = proposed_new_soft_limit;
614    p->protsize = p->end-p->start;
615    a->softlimit = proposed_new_soft_limit;
616  }
617  protect_area(p);
618}
619
620void
621restore_soft_stack_limit(unsigned stkreg)
622{
623  area *a;
624  TCR *tcr = get_tcr(true);
625
626  switch (stkreg) {
627  case Rsp:
628    a = tcr->cs_area;
629    if ((a->softlimit - 4096) > (a->hardlimit + 16384)) {
630      a->softlimit -= 4096;
631    }
632    tcr->cs_limit = (LispObj)ptr_to_lispobj(a->softlimit);
633    break;
634  case vsp:
635    a = tcr->vs_area;
636    adjust_soft_protection_limit(a);
637    break;
638  }
639}
640
/* Maybe this'll work someday.  We may have to do something to
   make the thread look like it's not handling an exception */
void
reset_lisp_process(ExceptionInformation *xp)
{
  /* Entire body is disabled; currently a no-op.  (Called on hard
     stack overflow, where no recovery is attempted.) */
#if 0
  TCR *tcr = get_tcr(true);
  catch_frame *last_catch = (catch_frame *) ptr_from_lispobj(untag(tcr->catch_top));

  tcr->save_allocptr = (void *) ptr_from_lispobj(xpGPR(xp, allocptr));

  tcr->save_vsp = (LispObj *) ptr_from_lispobj(((lisp_frame *)ptr_from_lispobj(last_catch->csp))->savevsp);

  start_lisp(tcr, 1);
#endif
}
657
658
659
660void
661platform_new_heap_segment(ExceptionInformation *xp, TCR *tcr, BytePtr low, BytePtr high)
662{
663  tcr->last_allocptr = (void *)high;
664  xpGPR(xp,allocptr) = (LispObj) high;
665  tcr->save_allocbase = (void*) low;
666}
667 
668void
669update_area_active (area **aptr, BytePtr value)
670{
671  area *a = *aptr;
672  for (; a; a = a->older) {
673    if ((a->low <= value) && (a->high >= value)) break;
674  };
675  if (a == NULL) Bug(NULL, "Can't find active area");
676  a->active = value;
677  *aptr = a;
678
679  for (a = a->younger; a; a = a->younger) {
680    a->active = a->high;
681  }
682}
683
684LispObj *
685tcr_frame_ptr(TCR *tcr)
686{
687  ExceptionInformation *xp;
688  LispObj *bp = NULL;
689
690  if (tcr->pending_exception_context)
691    xp = tcr->pending_exception_context;
692  else {
693    xp = tcr->suspend_context;
694  }
695  if (xp) {
696    bp = (LispObj *) xpGPR(xp, Rsp);
697  }
698  return bp;
699}
700
/* Bring tcr into a state the GC can scan: update its stack areas'
   active pointers from the context xp (or from values saved at
   ff-call time when xp is NULL), account for bytes allocated, and
   reset the saved allocation pointers.  is_other_tcr is true when
   normalizing a thread other than the caller's. */
void
normalize_tcr(ExceptionInformation *xp, TCR *tcr, Boolean is_other_tcr)

{
  void *cur_allocptr = NULL;
  LispObj freeptr = 0;

  if (xp) {
    if (is_other_tcr) {
      /* Finish any partially-executed allocation sequence in the
         other thread before trusting its registers. */
      pc_luser_xp(xp, tcr, NULL);
      freeptr = xpGPR(xp, allocptr);
      if (fulltag_of(freeptr) == 0){
        cur_allocptr = (void *) ptr_from_lispobj(freeptr);
      }
    }
    update_area_active((area **)&tcr->cs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp)));
    update_area_active((area **)&tcr->vs_area, (BytePtr) ptr_from_lispobj(xpGPR(xp, vsp)));
  } else {
    /* In ff-call. */
    cur_allocptr = (void *) (tcr->save_allocptr);
    update_area_active((area **)&tcr->vs_area, (BytePtr) tcr->save_vsp);
    update_area_active((area **)&tcr->cs_area, (BytePtr) tcr->last_lisp_frame);
  }


  tcr->save_allocptr = tcr->save_allocbase = (void *)VOID_ALLOCPTR;
  if (cur_allocptr) {
    update_bytes_allocated(tcr, cur_allocptr);
    if (freeptr) {
      xpGPR(xp, allocptr) = VOID_ALLOCPTR;
    }
  }
}
734
/* TCR of the thread currently driving a GC-like operation, or NULL. */
TCR *gc_tcr = NULL;

/* Suspend and "normalize" other tcrs, then call a gc-like function
   in that context.  Resume the other tcrs, then return what the
   function returned */

signed_natural
gc_like_from_xp(ExceptionInformation *xp, 
                signed_natural(*fun)(TCR *, signed_natural), 
                signed_natural param)
{
  TCR *tcr = get_tcr(true), *other_tcr;
  int result;
  signed_natural inhibit;

  suspend_other_threads(true);
  inhibit = (signed_natural)(lisp_global(GC_INHIBIT_COUNT));
  if (inhibit != 0) {
    /* GC is inhibited: flag the pending request by negating the
       count, then defer. */
    if (inhibit > 0) {
      lisp_global(GC_INHIBIT_COUNT) = (LispObj)(-inhibit);
    }
    resume_other_threads(true);
    gc_deferred++;
    return 0;
  }
  gc_deferred = 0;

  gc_tcr = tcr;

  xpGPR(xp, allocptr) = VOID_ALLOCPTR;

  normalize_tcr(xp, tcr, false);

  /* Pick a GC context for each other thread: a pending exception
     context if it has one, its suspend context if it was suspended
     while running lisp code, else none (it's in a synchronous
     ff-call). */
  for (other_tcr = tcr->next; other_tcr != tcr; other_tcr = other_tcr->next) {
    if (other_tcr->pending_exception_context) {
      other_tcr->gc_context = other_tcr->pending_exception_context;
    } else if (other_tcr->valence == TCR_STATE_LISP) {
      other_tcr->gc_context = other_tcr->suspend_context;
    } else {
      /* no pending exception, didn't suspend in lisp state:
         must have executed a synchronous ff-call.
      */
      other_tcr->gc_context = NULL;
    }
    normalize_tcr(other_tcr->gc_context, other_tcr, true);
  }
   


  result = fun(tcr, param);

  /* Clear the per-thread GC contexts now that the operation is done. */
  other_tcr = tcr;
  do {
    other_tcr->gc_context = NULL;
    other_tcr = other_tcr->next;
  } while (other_tcr != tcr);

  gc_tcr = NULL;

  resume_other_threads(true);

  return result;

}
800
801
802
803/* Returns #bytes freed by invoking GC */
804
805signed_natural
806gc_from_tcr(TCR *tcr, signed_natural param)
807{
808  area *a;
809  BytePtr oldfree, newfree;
810  BytePtr oldend, newend;
811
812  a = active_dynamic_area;
813  oldend = a->high;
814  oldfree = a->active;
815  gc(tcr, param);
816  newfree = a->active;
817  newend = a->high;
818#if 0
819  fprintf(dbgout, "End GC  in 0x%lx\n", tcr);
820#endif
821  return ((oldfree-newfree)+(newend-oldend));
822}
823
824signed_natural
825gc_from_xp(ExceptionInformation *xp, signed_natural param)
826{
827  signed_natural status = gc_like_from_xp(xp, gc_from_tcr, param);
828
829  freeGCptrs();
830  return status;
831}
832
/* Run purify with all other threads suspended and normalized. */
signed_natural
purify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, purify, param);
}
838
/* Run impurify with all other threads suspended and normalized. */
signed_natural
impurify_from_xp(ExceptionInformation *xp, signed_natural param)
{
  return gc_like_from_xp(xp, impurify, param);
}
844
845
846
847
848
849
/* Write-fault dispatch table, indexed by a protected area's "why"
   code (see handle_protection_violation): one spurious-fault entry,
   three soft-guard entries, three hard-guard entries.  Ordering must
   match the protected-area "why" enumeration -- NOTE(review):
   confirm against its definition. */
protection_handler
 * protection_handlers[] = {
   do_spurious_wp_fault,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_soft_stack_overflow,
   do_hard_stack_overflow,   
   do_hard_stack_overflow,
   do_hard_stack_overflow
   };
860
861
/* True if the faulting access was a write, judging by bit 11 (0x800)
   of the context's fault-status value -- presumably the ARM FSR
   WnR bit; confirm against the target's ARM reference manual. */
Boolean
is_write_fault(ExceptionInformation *xp, siginfo_t *info)
{
  return ((xpFaultStatus(xp) & 0x800) != 0);
}
867
/* Handle SIGSEGV/SIGBUS: recognize safe-reference and touch_page()
   probes, dispatch guard-page write faults to the appropriate
   protection handler, lazily unprotect the readonly area on writes,
   and otherwise report the fault to lisp (via the CMAIN callback)
   when it occurred in lisp code.  Returns true if handled. */
Boolean
handle_protection_violation(ExceptionInformation *xp, siginfo_t *info, TCR *tcr, int old_valence)
{
  BytePtr addr;
  protected_area_ptr area;
  protection_handler *handler;
  extern Boolean touch_page(void *);
  extern void touch_page_end(void);

#ifdef LINUX
  addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
#else
  /* Prefer the siginfo fault address when available. */
  if (info) {
    addr = (BytePtr)(info->si_addr);
  } else {
    addr = (BytePtr) ((natural) (xpFaultAddress(xp)));
  }
#endif

  if (addr && (addr == tcr->safe_ref_address)) {
    /* A "safe reference" faulted as anticipated: skip the access
       and return 0 in imm0. */
    adjust_exception_pc(xp,4);

    xpGPR(xp,imm0) = 0;
    return true;
  }

  if (xpPC(xp) == (pc)touch_page) {
    /* Fault inside the touch_page() probe: report failure by
       resuming at touch_page_end with imm0 = 0. */
    xpGPR(xp,imm0) = 0;
    xpPC(xp) = (pc)touch_page_end;
    return true;
  }


  if (is_write_fault(xp,info)) {
    area = find_protected_area(addr);
    if (area != NULL) {
      handler = protection_handlers[area->why];
      return handler(xp, area, addr);
    } else {
      if ((addr >= readonly_area->low) &&
          (addr < readonly_area->active)) {
        /* Writes into the readonly area unprotect the affected page
           on demand. */
        UnProtectMemory((LogicalAddress)(truncate_to_power_of_2(addr,log2_page_size)),
                        page_size);
        return true;
      }
    }
  }
  if (old_valence == TCR_STATE_LISP) {
    LispObj cmain = nrs_CMAIN.vcell;
   
    if ((fulltag_of(cmain) == fulltag_misc) &&
      (header_subtag(header_of(cmain)) == subtag_macptr)) {
     
      callback_for_trap(nrs_CMAIN.vcell, xp, is_write_fault(xp,info)?SIGBUS:SIGSEGV, (natural)addr, NULL);
    }
  }
  return false;
}
926
927
928
929
930
/* A write hit a hard guard page: there is no headroom left in which
   to signal a lisp error.  reset_lisp_process() is currently a
   no-op, so the fault is reported as unhandled (-1). */
OSStatus
do_hard_stack_overflow(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(area,addr)
#endif
  reset_lisp_process(xp);
  return -1;
}
940
941extern area*
942allocate_vstack(natural useable);       /* This is in "pmcl-kernel.c" */
943
944extern area*
945allocate_tstack(natural useable);       /* This is in "pmcl-kernel.c" */
946
947
948
949
950
951
/* True if the stack frame at spPtr carries the lisp frame marker. */
Boolean
lisp_frame_p(lisp_frame *spPtr)
{
  return (spPtr->marker == lisp_frame_marker);
}
957
958
959int ffcall_overflow_count = 0;
960
961
962
963
964
965
966/* Note: CURRENT_VS (CURRENT_TS) is always either the area containing
967  the current value of VSP (TSP) or an older area.  */
968
969OSStatus
970do_vsp_overflow (ExceptionInformation *xp, BytePtr addr)
971{
972  TCR* tcr = get_tcr(true);
973  area *a = tcr->vs_area;
974  protected_area_ptr vsp_soft = a->softprot;
975  unprotect_area(vsp_soft);
976  signal_stack_soft_overflow(xp,vsp);
977  return 0;
978}
979
980
981
982OSStatus
983do_soft_stack_overflow(ExceptionInformation *xp, protected_area_ptr prot_area, BytePtr addr)
984{
985  /* Trying to write into a guard page on the vstack or tstack.
986     Allocate a new stack segment, emulate stwu and stwux for the TSP, and
987     signal an error_stack_overflow condition.
988      */
989  if (prot_area->why == kVSPsoftguard) {
990    return do_vsp_overflow(xp,addr);
991  }
992  unprotect_area(prot_area);
993  signal_stack_soft_overflow(xp,Rsp);
994  return 0;
995}
996
/* Write fault in a tracked protected area that shouldn't have been
   protected: report as unhandled. */
OSStatus
do_spurious_wp_fault(ExceptionInformation *xp, protected_area_ptr area, BytePtr addr)
{
#ifdef SUPPORT_PRAGMA_UNUSED
#pragma unused(xp,area,addr)
#endif
  return -1;
}
1005
1006
1007
1008
1009     
1010
1011
1012
1013
1014
/* SIGFPE is never handled on this port (FP exceptions are not
   enabled -- see enable_fp_exceptions): always decline. */
Boolean
handle_sigfpe(ExceptionInformation *xp, TCR *tcr)
{
  return false;
}
1020
1021
/* Hook for emulating instructions the hardware doesn't implement;
   no emulation is done on this port, so always decline. */
Boolean
handle_unimplemented_instruction(ExceptionInformation *xp,
                                 opcode instruction,
                                 TCR *tcr)
{

  return false;
}
1030
1031Boolean
1032handle_exception(int xnum, 
1033                 ExceptionInformation *xp, 
1034                 TCR *tcr, 
1035                 siginfo_t *info,
1036                 int old_valence)
1037{
1038  pc program_counter;
1039  opcode instruction = 0;
1040
1041  if (old_valence != TCR_STATE_LISP) {
1042    return false;
1043  }
1044
1045  program_counter = xpPC(xp);
1046 
1047  if ((xnum == SIGILL) | (xnum == SIGTRAP)) {
1048    instruction = *program_counter;
1049  }
1050
1051  if (IS_ALLOC_TRAP(instruction)) {
1052    Boolean did_notify = false,
1053      *notify_ptr = &did_notify;
1054    if (did_gc_notification_since_last_full_gc) {
1055      notify_ptr = NULL;
1056    }
1057    return handle_alloc_trap(xp, tcr, notify_ptr);
1058  } else if ((xnum == SIGSEGV) ||
1059             (xnum == SIGBUS)) {
1060    return handle_protection_violation(xp, info, tcr, old_valence);
1061  } else if (xnum == SIGFPE) {
1062    return handle_sigfpe(xp, tcr);
1063  } else if ((xnum == SIGILL)) {
1064    if (IS_GC_TRAP(instruction)) {
1065      return handle_gc_trap(xp, tcr);
1066    } else if (IS_UUO(instruction)) {
1067      return handle_uuo(xp, info, instruction);
1068    } else {
1069      return handle_unimplemented_instruction(xp,instruction,tcr);
1070    }
1071  } else if (xnum == SIGNAL_FOR_PROCESS_INTERRUPT) {
1072    tcr->interrupt_pending = 0;
1073    callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
1074    return true;
1075  }
1076
1077  return false;
1078}
1079
/* Advance the exception PC by delta bytes.  xpPC() is an
   instruction-word pointer, so the byte count is converted to words
   (assumes delta is a multiple of 4). */
void
adjust_exception_pc(ExceptionInformation *xp, int delta)
{
  xpPC(xp) += (delta >> 2);
}
1085
1086
1087/*
1088  This wants to scan backwards until "where" points to an instruction
1089   whose major opcode is either 63 (double-float) or 59 (single-float)
1090*/
1091
1092OSStatus
1093handle_fpux_binop(ExceptionInformation *xp, pc where)
1094{
1095  OSStatus err = -1;
1096  opcode *there = (opcode *) where, instr, errnum = 0;
1097  return err;
1098}
1099
/* Decode and handle a UUO (an undefined instruction that the
   compiler emits deliberately).  Kernel-service UUOs perform
   thread-control operations; the various error formats are forwarded
   to handle_error().  On success the PC is advanced by "bump" bytes.
   Returns true if the UUO was handled. */
Boolean
handle_uuo(ExceptionInformation *xp, siginfo_t *info, opcode the_uuo) 
{
  unsigned 
    format = UUO_FORMAT(the_uuo);
  Boolean handled = false;
  int bump = 4;
  TCR *tcr = get_tcr(true);

  switch (format) {
  case uuo_format_kernel_service:
    {
      /* arg_z names the target thread; the service code selects the
         operation.  Results (where any) go in imm0. */
      TCR * target = (TCR *)xpGPR(xp,arg_z);
      int service = UUO_UNARY_field(the_uuo);

      switch (service) {
      case error_propagate_suspend:
        handled = true;
        break;
      case error_interrupt:
        xpGPR(xp,imm0) = (LispObj) raise_thread_interrupt(target);
        handled = true;
        break;
      case error_suspend:
        xpGPR(xp,imm0) = (LispObj) lisp_suspend_tcr(target);
        handled = true;
        break;
      case error_suspend_all:
        lisp_suspend_other_threads();
        handled = true;
        break;
      case error_resume:
        xpGPR(xp,imm0) = (LispObj) lisp_resume_tcr(target);
        handled = true;
        break;
      case error_resume_all:
        lisp_resume_other_threads();
        handled = true;
        break;
      case error_kill:
        xpGPR(xp,imm0) = (LispObj)kill_tcr(target);
        handled = true;
        break;
      case error_allocate_list:
        allocate_list(xp,tcr);
        handled = true;
        break;
      default:
        handled = false;
        break;
      }
      break;
    }

  case uuo_format_unary:
    switch(UUO_UNARY_field(the_uuo)) {
    case 3:
      /* Grow the TCR's thread-local-binding table if needed. */
      if (extend_tcr_tlb(tcr,xp,UUOA_field(the_uuo))) {
        handled = true;
        bump = 4;
        break;
      }
      /* fall in */
    default:
      handled = false;
      break;

    }
    break;

  case uuo_format_nullary:
    switch (UUOA_field(the_uuo)) {
    case 3:
      /* Breakpoint: advance the PC before entering the debugger so
         it resumes past the UUO. */
      adjust_exception_pc(xp, bump);
      bump = 0;
      lisp_Debugger(xp, info, debug_entry_dbg, false, "Lisp Breakpoint");
      handled = true;
      break;

    case 4:
      /* Deferred process interrupt: deliver it via CMAIN. */
      tcr->interrupt_pending = 0;
      callback_for_trap(nrs_CMAIN.vcell, xp, 0, 0, NULL);
      handled = true;
      break;
    default:
      handled = false;
      break;
    }
    break;


  case uuo_format_error_lisptag:
  case uuo_format_error_fulltag:
  case uuo_format_error_xtype:
  case uuo_format_cerror_lisptag:
  case uuo_format_cerror_fulltag:
  case uuo_format_cerror_xtype:
  case uuo_format_nullary_error:
  case uuo_format_unary_error:
  case uuo_format_binary_error:
  case uuo_format_ternary:
  case uuo_format_ternary2:
    /* handle_error() decodes the UUO itself and may adjust bump. */
    handled = handle_error(xp,0,the_uuo, &bump);
    break;

  default:
    handled = false;
    bump = 0;
  }
 
  if (handled && bump) {
    adjust_exception_pc(xp, bump);
  }
  return handled;
}
1215
1216natural
1217register_codevector_contains_pc (natural lisp_function, pc where)
1218{
1219  natural code_vector, size;
1220
1221  if ((fulltag_of(lisp_function) == fulltag_misc) &&
1222      (header_subtag(header_of(lisp_function)) == subtag_function)) {
1223    code_vector = deref(lisp_function, 2);
1224    size = header_element_count(header_of(code_vector)) << 2;
1225    if ((untag(code_vector) < (natural)where) && 
1226        ((natural)where < (code_vector + size)))
1227      return(code_vector);
1228  }
1229
1230  return(0);
1231}
1232
/* Thin alias for callback_to_lisp, kept for the trap-handling call
   sites; "info"/"arg" become the two lisp callback arguments. */
Boolean
callback_for_trap (LispObj callback_macptr, ExceptionInformation *xp, natural info,natural arg, int *bumpP)
{
  return callback_to_lisp(callback_macptr, xp, info,arg, bumpP);
}
1238
/*
  Call back into lisp through the foreign-callable function whose
  address is stored in callback_macptr (a lisp MACPTR object).  The
  callback receives the exception context, the two arguments, and the
  faulting function register/offset (for backtrace attribution).  The
  exception lock is released around the call and re-acquired after.
  The callback's integer return value (a PC "bump", in bytes) is
  stored through bumpP if non-NULL.  Always returns true.
*/
Boolean
callback_to_lisp (LispObj callback_macptr, ExceptionInformation *xp,
                  natural arg1, natural arg2, int *bumpP)
{
  natural  callback_ptr;
  area *a;
  natural fnreg = Rfn,  codevector, offset;
  pc where = xpPC(xp);
  int delta;

  /* Find which function register (fn, then nfn) holds the function
     whose code vector contains the faulting PC; report the PC as an
     offset into that function so lisp can identify it. */
  codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
  if (codevector == 0) {
    fnreg = nfn;
    codevector = register_codevector_contains_pc(xpGPR(xp,fnreg), where);
    if (codevector == 0) {
      fnreg = 0;                /* PC isn't in fn or nfn: pass raw address */
    }
  }
  if (codevector) {
    offset = (natural)where - (codevector - (fulltag_misc-node_size));
  } else {
    offset = (natural)where;
  }
                                                 
                                               

  TCR *tcr = get_tcr(true);

  /* Put the active stack pointer where .SPcallback expects it */
  a = tcr->cs_area;
  a->active = (BytePtr) ptr_from_lispobj(xpGPR(xp, Rsp));

  /* Copy globals from the exception frame to tcr */
  tcr->save_allocptr = (void *)ptr_from_lispobj(xpGPR(xp, allocptr));
  tcr->save_vsp = (LispObj*) ptr_from_lispobj(xpGPR(xp, vsp));



  /* Call back.
     Lisp will handle trampolining through some code that
     will push lr/fn & pc/nfn stack frames for backtrace.
  */
  callback_ptr = ((macptr *)ptr_from_lispobj(untag(callback_macptr)))->address;
  /* Drop the exception lock across the callback so other threads can
     take exceptions (or GC) while lisp runs. */
  UNLOCK(lisp_global(EXCEPTION_LOCK), tcr);
  delta = ((int (*)())callback_ptr) (xp, arg1, arg2, fnreg, offset);
  LOCK(lisp_global(EXCEPTION_LOCK), tcr);

  if (bumpP) {
    *bumpP = delta;
  }

  /* Copy GC registers back into exception frame */
  xpGPR(xp, allocptr) = (LispObj) ptr_to_lispobj(tcr->save_allocptr);
  return true;
}
1294
1295area *
1296allocate_no_stack (natural size)
1297{
1298#ifdef SUPPORT_PRAGMA_UNUSED
1299#pragma unused(size)
1300#endif
1301
1302  return (area *) NULL;
1303}
1304
1305
1306
1307
1308
1309
1310/* callback to (symbol-value cmain) if it is a macptr,
1311   otherwise report cause and function name to console.
1312   Returns noErr if exception handled OK */
1313OSStatus
1314handle_trap(ExceptionInformation *xp, opcode the_trap, pc where, siginfo_t *info)
1315{
1316  LispObj   cmain = nrs_CMAIN.vcell;
1317  TCR *tcr = TCR_FROM_TSD(xpGPR(xp, rcontext));
1318
1319}
1320
1321
1322
1323
1324void non_fatal_error( char *msg )
1325{
1326  fprintf( dbgout, "Non-fatal error: %s.\n", msg );
1327  fflush( dbgout );
1328}
1329
1330
1331
1332Boolean
1333handle_error(ExceptionInformation *xp, unsigned arg1, unsigned arg2, int *bumpP)
1334{
1335  LispObj   errdisp = nrs_ERRDISP.vcell;
1336
1337  if ((fulltag_of(errdisp) == fulltag_misc) &&
1338      (header_subtag(header_of(errdisp)) == subtag_macptr)) {
1339    /* errdisp is a macptr, we can call back to lisp */
1340    return callback_for_trap(errdisp, xp, arg1, arg2, bumpP);
1341    }
1342
1343  return false;
1344}
1345               
1346
1347/*
1348   Current thread has all signals masked.  Before unmasking them,
1349   make it appear that the current thread has been suspended.
1350   (This is to handle the case where another thread is trying
   to GC before this thread is able to seize the exception lock.)
1352*/
int
prepare_to_wait_for_exception_lock(TCR *tcr, ExceptionInformation *context)
{
  int old_valence = tcr->valence;

  /* Publish the context before changing valence: a suspending thread
     that observes TCR_STATE_EXCEPTION_WAIT must be able to find (and
     possibly update) this context. */
  tcr->pending_exception_context = context;
  tcr->valence = TCR_STATE_EXCEPTION_WAIT;

  /* Only now is it safe to let other signals (e.g. thread_suspend)
     preempt us. */
  ALLOW_EXCEPTIONS(context);
  return old_valence;           /* caller restores this via exit_signal_handler */
}
1364
/*
  Acquire the global exception lock, then push "xf" (recording the
  current exception context) onto the TCR's xframe list so the GC can
  find this context's registers.  Once linked, the pending-context
  slot is cleared and the thread presents itself as "foreign" while
  it runs handler code.
*/
void
wait_for_exception_lock_in_handler(TCR *tcr, 
                                   ExceptionInformation *context,
                                   xframe_list *xf)
{

  LOCK(lisp_global(EXCEPTION_LOCK), tcr);
  xf->curr = context;
  xf->prev = tcr->xframe;
  tcr->xframe =  xf;
  tcr->pending_exception_context = NULL;
  tcr->valence = TCR_STATE_FOREIGN; 
}
1378
/*
  Inverse of wait_for_exception_lock_in_handler: pop the xframe,
  restore the popped context as the pending exception context, mark
  the thread as returning from an exception, and release the lock.
*/
void
unlock_exception_lock_in_handler(TCR *tcr)
{
  tcr->pending_exception_context = tcr->xframe->curr;
  tcr->xframe = tcr->xframe->prev;
  tcr->valence = TCR_STATE_EXCEPTION_RETURN;
  UNLOCK(lisp_global(EXCEPTION_LOCK),tcr);
}
1387
1388/*
1389   If an interrupt is pending on exception exit, try to ensure
1390   that the thread sees it as soon as it's able to run.
1391*/
void
raise_pending_interrupt(TCR *tcr)
{
  /* Positive interrupt level means interrupts are enabled and one is
     pending: re-signal the thread so it takes the interrupt as soon
     as it can run. */
  if (TCR_INTERRUPT_LEVEL(tcr) > 0) {
    pthread_kill((pthread_t)ptr_from_lispobj(tcr->osid), SIGNAL_FOR_PROCESS_INTERRUPT);
  }
}
1399
/*
  Final bookkeeping before returning from a signal handler: block all
  signals, restore the thread's pre-exception valence and saved
  last_lisp_frame, and clear the pending exception context.
*/
void
exit_signal_handler(TCR *tcr, int old_valence, natural old_last_lisp_frame)
{
#ifndef ANDROID
  sigset_t mask;
  sigfillset(&mask);
#else
  /* Android's rt_sigprocmask takes a raw 64-bit mask; all-ones ==
     "block everything".  See the pthread_sigmask macro at the top of
     this file. */
  int mask [] = {-1,-1};
#endif
 
  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask, NULL);
  tcr->valence = old_valence;
  tcr->pending_exception_context = NULL;
  tcr->last_lisp_frame = old_last_lisp_frame;
}
1415
1416
/*
  Common synchronous-exception handler.  On Darwin the Mach exception
  machinery has already computed tcr/old_valence/old_last_lisp_frame
  and passes them in; elsewhere they're derived here.  Takes the
  exception lock, dispatches to handle_exception(), and enters the
  kernel debugger if the exception wasn't handled.
*/
void
signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context
#ifdef DARWIN
, TCR *tcr, int old_valence, natural old_last_lisp_frame
#endif
)
{
  xframe_list xframe_link;
#ifndef DARWIN
   
    TCR *tcr = (TCR *) get_interrupt_tcr(false);
 
    /* The signal handler's entered with all signals (notably the
       thread_suspend signal) blocked.  Don't allow any other signals
       (notably the thread_suspend signal) to preempt us until we've
       set the TCR's xframe slot to include the current exception
       context.
    */
   
    natural  old_last_lisp_frame = tcr->last_lisp_frame;
    int old_valence;

    tcr->last_lisp_frame = xpGPR(context,Rsp);
    old_valence = prepare_to_wait_for_exception_lock(tcr, context);
#endif

  /* A suspend request arrived while we were setting up: deliver the
     suspend signal to ourselves now that it's safe to take it. */
  if (tcr->flags & (1<<TCR_FLAG_BIT_PENDING_SUSPEND)) {
    CLR_TCR_FLAG(tcr, TCR_FLAG_BIT_PENDING_SUSPEND);
    pthread_kill(pthread_self(), thread_suspend_signal);
  }

 
  wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
  if ((!handle_exception(signum, context, tcr, info, old_valence))) {
    /* Unhandled: describe the exception and drop into the kernel
       debugger; "propagate" lets the host OS see the signal. */
    char msg[512];
    snprintf(msg, sizeof(msg), "Unhandled exception %d at 0x%lx, context->regs at #x%lx", signum, xpPC(context), (natural)xpGPRvector(context));
    if (lisp_Debugger(context, info, signum, (old_valence != TCR_STATE_LISP), msg)) {
      SET_TCR_FLAG(tcr,TCR_FLAG_BIT_PROPAGATE_EXCEPTION);
    }
  }
  unlock_exception_lock_in_handler(tcr);

  /* This thread now looks like a thread that was suspended while
     executing lisp code.  If some other thread gets the exception
     lock and GCs, the context (this thread's suspend_context) will
     be updated.  (That's only of concern if it happens before we
     can return to the kernel/to the Mach exception handler).
  */
  exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
  raise_pending_interrupt(tcr);
}
1468
1469
/*
  SIGILL handler.  Lisp's UUOs are conditional: the ARM condition
  field occupies bits 31:28 of the instruction (bits 31:29 select the
  condition pair, bit 28 inverts it).  If the faulting instruction is
  a UUO whose condition is false in the saved PSR, just skip it;
  otherwise fall through to the common signal_handler.
*/
void
sigill_handler(int signum, siginfo_t *info, ExceptionInformation  *xp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;

  if (IS_UUO(instr)) {
    natural psr = xpPSR(xp);
    Boolean opcode_matched_condition = false,
      flip = ((instr & (1<<28)) != 0);   /* bit 28: negate the condition */
   

    switch (instr >> 29) {
    case 0:                     /* EQ: Z set */
      opcode_matched_condition = ((psr & PSR_Z_MASK) != 0);
      break;
    case 1:                     /* CS: C set */
      opcode_matched_condition = ((psr & PSR_C_MASK) != 0);
      break;
    case 2:                     /* MI: N set */
      opcode_matched_condition = ((psr & PSR_N_MASK) != 0);
      break;
    case 3:                     /* VS: V set */
      opcode_matched_condition = ((psr & PSR_V_MASK) != 0);
      break;
    case 4:                     /* HI: C set and Z clear */
      opcode_matched_condition = (((psr & PSR_C_MASK) != 0) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 5:                     /* GE: N == V */
      opcode_matched_condition = (((psr & PSR_N_MASK) != 0) ==
                                  ((psr & PSR_V_MASK) != 0));
      break;
    case 6:                     /* GT: (N == V) and Z clear */
      opcode_matched_condition = ((((psr & PSR_N_MASK) != 0) ==
                                   ((psr & PSR_V_MASK) != 0)) &&
                                  ((psr & PSR_Z_MASK) == 0));
      break;
    case 7:                     /* AL: always (never flipped) */
      opcode_matched_condition = true;
      flip = false;
      break;
    }
    if (flip) {
      opcode_matched_condition = !opcode_matched_condition;
    }
    if (!opcode_matched_condition) {
      /* Condition false: the UUO is a no-op; step over it. */
      adjust_exception_pc(xp,4);
      return;
    }
  }
  signal_handler(signum,info,xp);
}
1523
1524
1525#ifdef USE_SIGALTSTACK
/*
  Running on the small sigaltstack: copy the saved context and siginfo
  onto the interrupted thread's own (lisp) stack, then tail-call
  "handler" there via the assembly shim call_handler_on_main_stack,
  so deep handler work doesn't overflow the alternate stack.
*/
void
invoke_handler_on_main_stack(int signo, siginfo_t *info, ExceptionInformation *xp, void *return_address, void *handler)
{
  ExceptionInformation *xp_copy;
  siginfo_t *info_copy;
  extern void call_handler_on_main_stack(int, siginfo_t *, ExceptionInformation *,void *, void *);
 
  /* Carve space below the interrupted SP for the copied ucontext. */
  BytePtr target_sp= (BytePtr)xpGPR(xp,Rsp);
  target_sp -= sizeof(ucontext_t);
  xp_copy = (ExceptionInformation *)target_sp;
  memmove(target_sp,xp,sizeof(*xp));
  /* The copy must not claim to live on the alternate stack. */
  xp_copy->uc_stack.ss_sp = 0;
  xp_copy->uc_stack.ss_size = 0;
  xp_copy->uc_stack.ss_flags = 0;
  xp_copy->uc_link = NULL;
  target_sp -= sizeof(siginfo_t);
  info_copy = (siginfo_t *)target_sp;
  memmove(target_sp,info,sizeof(*info));
  call_handler_on_main_stack(signo, info_copy, xp_copy, return_address, handler);
}
1546 
/*
  First-level SIGSEGV/SIGBUS handler, run on the alternate signal
  stack.  Handles control-stack guard-page faults in place (remapping
  or unprotecting the soft-guard region); anything else is forwarded
  to signal_handler on the main stack.
*/
void
altstack_signal_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  TCR *tcr=get_tcr(true);
 
  if (signo == SIGBUS) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address); 
    area *a = tcr->cs_area;
    /* Fault exactly on the soft-limit page: try to map a fresh page
       there and resume. */
    if (((BytePtr)truncate_to_power_of_2(addr,log2_page_size))== a->softlimit) 
{
      if (mmap(a->softlimit,
               page_size,
               PROT_READ|PROT_WRITE|PROT_EXEC,
               MAP_PRIVATE|MAP_ANON|MAP_FIXED,
               -1,
               0) == a->softlimit) {
        return;
      }
    }
  } else if (signo == SIGSEGV) {
    BytePtr addr = (BytePtr)(xp->uc_mcontext.fault_address);
    area *a = tcr->cs_area;
   
    /* Fault in the control stack's guard region? */
    if ((addr >= a->low) &&
        (addr < a->softlimit)) {
      if (addr < a->hardlimit) {
        Bug(xp, "hard stack overflow");
      } else {
        /* Soft overflow: open the soft-guard pages so lisp can signal
           a stack-overflow condition and unwind. */
        UnProtectMemory(a->hardlimit,a->softlimit-a->hardlimit);
      }
    }
  }
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), signal_handler);
}
1581#endif
1582
1583/*
1584  If it looks like we're in the middle of an atomic operation, make
1585  it seem as if that operation is either complete or hasn't started
1586  yet.
1587
1588  The cases handled include:
1589
1590  a) storing into a newly-allocated lisp frame on the stack.
1591  b) marking a newly-allocated TSP frame as containing "raw" data.
1592  c) consing: the GC has its own ideas about how this should be
1593     handled, but other callers would be best advised to back
1594     up or move forward, according to whether we're in the middle
1595     of allocating a cons cell or allocating a uvector.
1596  d) a STMW to the vsp
1597  e) EGC write-barrier subprims.
1598*/
1599
1600extern opcode
1601  egc_write_barrier_start,
1602  egc_write_barrier_end, 
1603  egc_store_node_conditional, 
1604  egc_store_node_conditional_test,
1605  egc_set_hash_key, egc_set_hash_key_did_store,
1606  egc_gvset, egc_gvset_did_store,
1607  egc_rplaca, egc_rplaca_did_store,
1608  egc_rplacd, egc_rplacd_did_store,
1609  egc_set_hash_key_conditional,
1610  egc_set_hash_key_conditional_test,
1611  swap_lr_lisp_frame_temp0,
1612  swap_lr_lisp_frame_arg_z;
1613
1614
1615extern opcode ffcall_return_window, ffcall_return_window_end;
1616
/*
  "Fix up" an interrupted thread's context so that any in-progress
  atomic operation appears either complete or not-yet-started (see the
  block comment above).  Handles three situations, keyed off the PC:
  EGC write-barrier subprims, partially-completed allocation, and the
  swap_lr_lisp_frame shims.  If alloc_disp is non-NULL and the thread
  was mid-allocation, the allocptr displacement is returned through it
  and the caller is expected to re-apply it on resume.
*/
void
pc_luser_xp(ExceptionInformation *xp, TCR *tcr, signed_natural *alloc_disp)
{
  pc program_counter = xpPC(xp);
  opcode instr = *program_counter;
  /* NOTE(review): "frame" is computed but never used below. */
  lisp_frame *frame = (lisp_frame *)ptr_from_lispobj(xpGPR(xp,Rsp));
  LispObj cur_allocptr = xpGPR(xp, allocptr);
  int allocptr_tag = fulltag_of(cur_allocptr);
 


  /* Case 1: interrupted inside an EGC write-barrier subprim.  If the
     store hasn't happened yet, just return (the store will re-run);
     if it has, do the memoization the subprim would have done and
     simulate its return (PC <- LR). */
  if ((program_counter < &egc_write_barrier_end) && 
      (program_counter >= &egc_write_barrier_start)) {
    LispObj *ea = 0, val = 0, root = 0;
    bitvector refbits = (bitvector)(lisp_global(REFBITS));
    Boolean need_check_memo = true, need_memoize_root = false;

    if (program_counter >= &egc_set_hash_key_conditional) {
      if ((program_counter < &egc_set_hash_key_conditional_test) ||
          ((program_counter == &egc_set_hash_key_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* Conditional store not yet attempted, or failed: nothing to
           memoize and no PC adjustment needed. */
        return;
      }
      /* NOTE(review): "val" stays 0 in the two conditional-store
         paths, so the (ea < val) memo test below can't fire for them
         -- confirm against the x86 version whether the stored value
         should be fetched here. */
      root = xpGPR(xp,arg_x);
      ea = (LispObj *) (root+unbox_fixnum(xpGPR(xp,temp2)));
      need_memoize_root = true;
    } else if (program_counter >= &egc_store_node_conditional) {
      if ((program_counter < &egc_store_node_conditional_test) ||
          ((program_counter == &egc_store_node_conditional_test) &&
           (! (xpPSR(xp) & PSR_Z_MASK)))) {
        /* The conditional store either hasn't been attempted yet, or
           has failed.  No need to adjust the PC, or do memoization. */
        return;
      }
      ea = (LispObj*)(xpGPR(xp,arg_x) + unbox_fixnum(xpGPR(xp,temp2)));
      xpGPR(xp,arg_z) = t_value;        /* subprim's success result */
    } else if (program_counter >= &egc_set_hash_key) {
      if (program_counter < &egc_set_hash_key_did_store) {
        return;
      }
      root = xpGPR(xp,arg_x);
      val = xpGPR(xp,arg_z);
      ea = (LispObj *) (root+xpGPR(xp,arg_y)+misc_data_offset);
      need_memoize_root = true;
    } else if (program_counter >= &egc_gvset) {
      if (program_counter < &egc_gvset_did_store) {
        return;
      } 
      ea = (LispObj *) (xpGPR(xp,arg_x)+xpGPR(xp,arg_y)+misc_data_offset);
      val = xpGPR(xp,arg_z);
    } else if (program_counter >= &egc_rplacd) {
      if (program_counter < &egc_rplacd_did_store) {
        return;
      } 
      ea = (LispObj *) untag(xpGPR(xp,arg_y));
      val = xpGPR(xp,arg_z);
    } else {                      /* egc_rplaca */
      if (program_counter < &egc_rplaca_did_store) {
        return;
      } 
      ea =  ((LispObj *) untag(xpGPR(xp,arg_y)))+1;
      val = xpGPR(xp,arg_z);
    }
    if (need_check_memo) {
      /* Memoize an old->new store: set the refbits (and the coarser
         refidx) for the stored-into dnode, and for the root object if
         hash-table consistency requires it. */
      natural  bitnumber = area_dnode(ea, lisp_global(REF_BASE));
      if ((bitnumber < lisp_global(OLDSPACE_DNODE_COUNT)) &&
          ((LispObj)ea < val)) {
        atomic_set_bit(refbits, bitnumber);
        atomic_set_bit(global_refidx, bitnumber>>8);
        if (need_memoize_root) {
          bitnumber = area_dnode(root, lisp_global(REF_BASE));
          atomic_set_bit(refbits, bitnumber);
          atomic_set_bit(global_refidx,bitnumber>>8);
        }
      }
    }
    /* Simulate the subprim's return. */
    xpPC(xp) = xpLR(xp);
    return;
  }


 
  /* Case 2: allocptr holds a tagged (in-progress) allocation. */
  if (allocptr_tag != tag_fixnum) {
    signed_natural disp = allocptr_displacement(xp);

    if (disp) {
      /* Being architecturally "at" the alloc trap doesn't tell
         us much (in particular, it doesn't tell us whether
         or not the thread has committed to taking the trap
         and is waiting for the exception lock (or waiting
         for the Mach exception thread to tell it how bad
         things are) or is about to execute a conditional
         trap.
         Regardless of which case applies, we want the
         other thread to take (or finish taking) the
         trap, and we don't want it to consider its
         current allocptr to be valid.
         The difference between this case (suspend other
         thread for GC) and the previous case (suspend
         current thread for interrupt) is solely a
         matter of what happens after we leave this
         function: some non-current thread will stay
         suspended until the GC finishes, then take
         (or start processing) the alloc trap.   The
         current thread will go off and do PROCESS-INTERRUPT
         or something, and may return from the interrupt
         and need to finish the allocation that got interrupted.
      */

      if (alloc_disp) {
        *alloc_disp = disp;
        xpGPR(xp,allocptr) -= disp;
        /* Leave the PC at the alloc trap.  When the interrupt
           handler returns, it'll decrement allocptr by disp
           and the trap may or may not be taken.
        */
      } else {
        Boolean ok = false;
        update_bytes_allocated(tcr, (void *) ptr_from_lispobj(cur_allocptr - disp));
        xpGPR(xp, allocptr) = VOID_ALLOCPTR + disp;
        /* Verify the expected instruction sequence precedes the trap
           and voidify the register the trap compares against, so the
           trap will be (re)taken after the GC. */
        instr = program_counter[-1];
        if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
          instr = program_counter[-2];
          if (IS_COMPARE_ALLOCPTR_TO_RM(instr)){
            xpGPR(xp,RM_field(instr)) = VOID_ALLOCPTR;
            ok = true;
          }
        }
        if (ok) {
          /* Clear the carry bit, so that the trap will be taken. */
          xpPSR(xp) &= ~PSR_C_MASK;
        } else {
          Bug(NULL, "unexpected instruction preceding alloc trap.");
        }
      }
    } else {
      /* we may be before or after the alloc trap.  If before, set
         allocptr to VOID_ALLOCPTR and back up to the start of the
         instruction sequence; if after, finish the allocation. */
      Boolean before_alloc_trap = false;

      /* Walk backwards through the known allocation prologue. */
      if (IS_BRANCH_AROUND_ALLOC_TRAP(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_COMPARE_ALLOCPTR_TO_RM(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_LOAD_RD_FROM_ALLOCBASE(instr)) {
        before_alloc_trap = true;
        --program_counter;
        instr = *program_counter;
      }
      if (IS_SUB_HI_FROM_ALLOCPTR(instr)) {
        before_alloc_trap = true;
        --program_counter;
      }
      if (before_alloc_trap) {
        xpPC(xp) = program_counter;
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
      } else {
        /* If we're already past the alloc_trap, finish allocating
           the object. */
        if (allocptr_tag == fulltag_cons) {
          finish_allocating_cons(xp);
        } else {
          if (allocptr_tag == fulltag_misc) {
            finish_allocating_uvector(xp);
          } else {
            Bug(xp, "what's being allocated here ?");
          }
        }
        /* Whatever we finished allocating, reset allocptr/allocbase to
           VOID_ALLOCPTR */
        xpGPR(xp,allocptr) = VOID_ALLOCPTR;
        tcr->save_allocptr = tcr->save_allocbase = VOID_ALLOCPTR;
      }
      return;
    }
    return;
  }
  /* Case 3: interrupted inside one of the 3-instruction
     swap_lr_lisp_frame shims: complete the swap by hand and resume
     after the shim. */
  {
    lisp_frame *swap_frame = NULL;
    pc base = &swap_lr_lisp_frame_temp0;
   
    if ((program_counter >base)             /* sic */
        && (program_counter < (base+3))) {
      swap_frame = (lisp_frame *)xpGPR(xp,temp0);
    } else {
      base = &swap_lr_lisp_frame_arg_z;
      if ((program_counter > base) && (program_counter < (base+3))) { 
        swap_frame = (lisp_frame *)xpGPR(xp,arg_z);
      }
    }
    if (swap_frame) {
      if (program_counter == (base+1)) {
        /* The savelr store hadn't happened yet: do it now. */
        swap_frame->savelr = xpGPR(xp,Rlr);
      }
      xpGPR(xp,Rlr) = xpGPR(xp,imm0);
      xpPC(xp) = base+3;
      return;
    }
  }
}
1824
/*
  Handler for SIGNAL_FOR_PROCESS_INTERRUPT.  If the thread has
  interrupts disabled, is in foreign code, or is unwinding, just
  record a pending interrupt; otherwise repair any in-progress atomic
  operation (pc_luser_xp), take the exception lock, and dispatch the
  interrupt through handle_exception.
*/
void
interrupt_handler (int signum, siginfo_t *info, ExceptionInformation *context)
{
  TCR *tcr = get_interrupt_tcr(false);
  if (tcr) {
    if (TCR_INTERRUPT_LEVEL(tcr) < 0) {
      /* Interrupts disabled: remember one arrived. */
      tcr->interrupt_pending = 1 << fixnumshift;
    } else {
      LispObj cmain = nrs_CMAIN.vcell;

      if ((fulltag_of(cmain) == fulltag_misc) &&
          (header_subtag(header_of(cmain)) == subtag_macptr)) {
        /*
           This thread can (allegedly) take an interrupt now.
           It's tricky to do that if we're executing
           foreign code (especially Linuxthreads code, much
           of which isn't reentrant.)
           If we're unwinding the stack, we also want to defer
           the interrupt.
        */
        if ((tcr->valence != TCR_STATE_LISP) ||
            (tcr->unwinding != 0)) {
          tcr->interrupt_pending = 1 << fixnumshift;
        } else {
          xframe_list xframe_link;
          int old_valence;
          signed_natural disp=0;
          natural old_last_lisp_frame = tcr->last_lisp_frame;
         
          tcr->last_lisp_frame = xpGPR(context,Rsp);
          /* Make any partial allocation look un-started; disp records
             what to re-apply to allocptr afterwards. */
          pc_luser_xp(context, tcr, &disp);
          old_valence = prepare_to_wait_for_exception_lock(tcr, context);
          wait_for_exception_lock_in_handler(tcr, context, &xframe_link);
          handle_exception(signum, context, tcr, info, old_valence);
          if (disp) {
            xpGPR(context,allocptr) -= disp;
          }
          unlock_exception_lock_in_handler(tcr);
          exit_signal_handler(tcr, old_valence, old_last_lisp_frame);
        }
      }
    }
  }
#ifdef DARWIN
    DarwinSigReturn(context);
#endif
}
1872
1873#ifdef USE_SIGALTSTACK
/* Trampoline: run interrupt_handler on the interrupted thread's own
   stack instead of the small alternate signal stack. */
void
altstack_interrupt_handler(int signum, siginfo_t *info, ExceptionInformation *context)
{
  invoke_handler_on_main_stack(signum, info, context, __builtin_return_address(0),interrupt_handler);
}
1879#endif
1880
1881
1882void
1883install_signal_handler(int signo, void *handler, unsigned flags)
1884{
1885  struct sigaction sa;
1886  int err;
1887
1888  sigfillset(&sa.sa_mask);
1889 
1890  sa.sa_sigaction = (void *)handler;
1891  sigfillset(&sa.sa_mask);
1892  sa.sa_flags = SA_SIGINFO;
1893
1894#ifdef ANDROID
1895  sa.sa_flags |= SA_NODEFER;
1896#endif
1897#ifdef USE_SIGALTSTACK
1898  if (flags & ON_ALTSTACK)
1899    sa.sa_flags |= SA_ONSTACK;
1900#endif
1901  if (flags & RESTART_SYSCALLS)
1902    sa.sa_flags |= SA_RESTART;
1903  if (flags & RESERVE_FOR_LISP) {
1904    extern sigset_t user_signals_reserved;
1905    sigaddset(&user_signals_reserved, signo);
1906  }
1907
1908  err = sigaction(signo, &sa, NULL);
1909  if (err) {
1910    perror("sigaction");
1911    exit(1);
1912  }
1913}
1914
1915
/*
  Install the runtime's core exception handlers: SIGILL for UUOs,
  SIGSEGV/SIGBUS (on the alternate stack) for guard-page faults, the
  process-interrupt signal, and ignore SIGPIPE (lisp handles broken
  pipes via write(2) error returns).
*/
void
install_pmcl_exception_handlers()
{
  install_signal_handler(SIGILL, (void *)sigill_handler, RESERVE_FOR_LISP);
  install_signal_handler(SIGSEGV, (void *)ALTSTACK(signal_handler),
                         RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGBUS, (void *)ALTSTACK(signal_handler),
                           RESERVE_FOR_LISP|ON_ALTSTACK);
  install_signal_handler(SIGNAL_FOR_PROCESS_INTERRUPT,
                         (void *)interrupt_handler, RESERVE_FOR_LISP);
  signal(SIGPIPE, SIG_IGN);
}
1928
1929
1930#ifdef USE_SIGALTSTACK
1931void
1932setup_sigaltstack(area *a)
1933{
1934  stack_t stack;
1935#if 0
1936  stack.ss_sp = a->low;
1937  a->low += SIGSTKSZ*8;
1938#endif
1939  stack.ss_size = SIGSTKSZ*8;
1940  stack.ss_flags = 0;
1941  stack.ss_sp = mmap(NULL,stack.ss_size, PROT_READ|PROT_WRITE|PROT_EXEC,MAP_ANON|MAP_PRIVATE,-1,0);
1942  if (sigaltstack(&stack, NULL) != 0) {
1943    perror("sigaltstack");
1944    exit(-1);
1945  }
1946}
1947#endif
1948
1949void
1950thread_kill_handler(int signum, siginfo_t info, ExceptionInformation *xp)
1951{
1952  TCR *tcr = get_tcr(false);
1953  area *a;
1954#ifndef ANDROID
1955  sigset_t mask;
1956 
1957  sigemptyset(&mask);
1958#else
1959  int mask[] = {0,0};
1960#endif
1961
1962  if (tcr) {
1963    tcr->valence = TCR_STATE_FOREIGN;
1964    a = tcr->vs_area;
1965    if (a) {
1966      a->active = a->high;
1967    }
1968    a = tcr->cs_area;
1969    if (a) {
1970      a->active = a->high;
1971    }
1972  }
1973 
1974  pthread_sigmask(SIG_SETMASK,(sigset_t *)&mask,NULL);
1975  pthread_exit(NULL);
1976}
1977
1978#ifdef USE_SIGALTSTACK
/* Trampoline: run thread_kill_handler on the thread's own stack
   rather than the alternate signal stack. */
void
altstack_thread_kill_handler(int signo, siginfo_t *info, ExceptionInformation *xp)
{
  invoke_handler_on_main_stack(signo, info, xp, __builtin_return_address(0), thread_kill_handler);
}
1984#endif
1985
/*
  Choose and install the signals used for inter-thread suspend and
  kill requests.  Suspend restarts interrupted syscalls so that a
  suspension is transparent to foreign code.
*/
void
thread_signal_setup()
{
  thread_suspend_signal = SIG_SUSPEND_THREAD;
  thread_kill_signal = SIG_KILL_THREAD;

  install_signal_handler(thread_suspend_signal, (void *)suspend_resume_handler,
                         RESERVE_FOR_LISP|RESTART_SYSCALLS);
  install_signal_handler(thread_kill_signal, (void *)thread_kill_handler,
                         RESERVE_FOR_LISP);
}
1997
1998
1999
2000void
2001unprotect_all_areas()
2002{
2003  protected_area_ptr p;
2004
2005  for(p = AllProtectedAreas, AllProtectedAreas = NULL; p; p = p->next) {
2006    unprotect_area(p);
2007  }
2008}
2009
2010/*
2011  A binding subprim has just done "twlle limit_regno,idx_regno" and
2012  the trap's been taken.  Extend the tcr's tlb so that the index will
2013  be in bounds and the new limit will be on a page boundary, filling
2014  in the new page(s) with 'no_thread_local_binding_marker'.  Update
2015  the tcr fields and the registers in the xp and return true if this
2016  all works, false otherwise.
2017
2018  Note that the tlb was allocated via malloc, so realloc can do some
2019  of the hard work.
2020*/
/*
  Grow the thread-local-binding vector so that "index" (taken from the
  trapping register idx_regno) is in bounds; the new limit is rounded
  up to a 4KB boundary.  Newly-added slots are filled with
  no_thread_local_binding_marker.  Returns true on success, false if
  realloc fails (in which case the old tlb remains valid and
  unchanged -- note tcr->tlb_pointer is only updated on success).
*/
Boolean
extend_tcr_tlb(TCR *tcr, 
               ExceptionInformation *xp, 
               unsigned idx_regno)
{
  unsigned
    index = (unsigned) (xpGPR(xp,idx_regno)),
    old_limit = tcr->tlb_limit,
    new_limit = align_to_power_of_2(index+1,12),
    new_bytes = new_limit-old_limit;
  LispObj
    *old_tlb = tcr->tlb_pointer,
    *new_tlb = realloc(old_tlb, new_limit),
    *work;

  if (new_tlb == NULL) {
    return false;
  }
 
  /* Initialize only the newly-added tail of the vector. */
  work = (LispObj *) ((BytePtr)new_tlb+old_limit);

  while (new_bytes) {
    *work++ = no_thread_local_binding_marker;
    new_bytes -= sizeof(LispObj);
  }
  tcr->tlb_pointer = new_tlb;
  tcr->tlb_limit = new_limit;
  return true;
}
2050
2051
2052
/* One-time exception-subsystem initialization: install the core
   signal handlers. */
void
exception_init()
{
  install_pmcl_exception_handlers();
}
2058
2059
2060
2061
2062
2063
2064
/*
  Temporary SIGILL handler used during startup feature probing: if the
  fault happened at one of the known feature-check instructions (FPU
  or ldrex probes), conclude we're on an ARMv5-class core; otherwise
  assume ARMv6+.  Either way, resume at the probe's return address.
  NOTE(review): feature_check_clrex is declared but never consulted
  here -- confirm whether a clrex-probe branch is missing.
*/
void
early_signal_handler(int signum, siginfo_t *info, ExceptionInformation  *context)
{
  extern pc feature_check_fpu,feature_check_ldrex,feature_check_clrex;
  extern int arm_architecture_version;
 
  if ((xpPC(context) == feature_check_fpu) ||
      (xpPC(context) == feature_check_ldrex)) {
    arm_architecture_version = 5;
  } else {
    arm_architecture_version = 6;
  }
  xpPC(context) = xpLR(context);
}
Note: See TracBrowser for help on using the repository browser.